column         type            min    max
commit         stringlengths   40     40
old_file       stringlengths   4      150
new_file       stringlengths   4      150
old_contents   stringlengths   0      3.26k
new_contents   stringlengths   1      4.43k
subject        stringlengths   15     501
message        stringlengths   15     4.06k
lang           stringclasses   4 values
license        stringclasses   13 values
repos          stringlengths   5      91.5k
diff           stringlengths   0      4.35k
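Each record below fills these columns in order (commit hash, old and new file path, old and new file contents, commit subject and message, language, license, repositories, and unified diff), one field per line. As a minimal sketch only, assuming the records are stored as one JSON object per line under a hypothetical file name, rows with this schema could be loaded and filtered on the lang, license, and diff columns roughly like this:

import json

# Hypothetical path and storage format: assumes one JSON object per line,
# with keys matching the columns listed above (commit, old_file, new_file,
# old_contents, new_contents, subject, message, lang, license, repos, diff).
records = []
with open("commit_records.jsonl", encoding="utf-8") as fh:
    for line in fh:
        records.append(json.loads(line))

# Keep only Python commits under MIT or BSD-style licenses whose diff is
# non-empty; license values such as "mit" and "bsd-3-clause" appear in the
# records below.
wanted = {"mit", "bsd-2-clause", "bsd-3-clause"}
selected = [
    r for r in records
    if r["lang"] == "Python" and r["license"] in wanted and r["diff"].strip()
]

for r in selected:
    print(r["commit"][:8], r["new_file"], "-", r["subject"])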
73477dbb9176f7c71a1ce3bbab70313fb65578f8
uk_results/serializers.py
uk_results/serializers.py
from __future__ import unicode_literals from rest_framework import serializers from .models import PostResult, ResultSet, CandidateResult from candidates.serializers import ( MembershipSerializer, MinimalPostExtraSerializer ) class CandidateResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CandidateResult fields = ( 'id', 'url', 'membership', 'result_set', 'num_ballots_reported', 'is_winner', ) membership = MembershipSerializer(read_only=True) # result_set = ResultSetSerializer(read_only=True) class ResultSetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ResultSet fields = ( 'id', 'url', 'candidate_results', 'ip_address', 'num_turnout_reported', 'num_spoilt_ballots', # 'post_result', 'user', 'user_id', ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username') user_id = serializers.ReadOnlyField(source='user.id') candidate_results = CandidateResultSerializer(many=True, read_only=True) class PostResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = PostResult fields = ( 'id', 'url', 'confirmed', 'post', 'result_sets', ) post = MinimalPostExtraSerializer(source='post.extra') result_sets = ResultSetSerializer(many=True)
from __future__ import unicode_literals from rest_framework import serializers from .models import PostResult, ResultSet, CandidateResult from candidates.serializers import ( MembershipSerializer, MinimalPostExtraSerializer ) class CandidateResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CandidateResult fields = ( 'id', 'url', 'membership', 'result_set', 'num_ballots_reported', 'is_winner', ) membership = MembershipSerializer(read_only=True) # result_set = ResultSetSerializer(read_only=True) class ResultSetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ResultSet fields = ( 'id', 'url', 'candidate_results', 'ip_address', 'num_turnout_reported', 'num_spoilt_ballots', # 'post_result', 'user', 'user_id', 'review_status', ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username') user_id = serializers.ReadOnlyField(source='user.id') candidate_results = CandidateResultSerializer(many=True, read_only=True) class PostResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = PostResult fields = ( 'id', 'url', 'confirmed', 'post', 'result_sets', ) post = MinimalPostExtraSerializer(source='post.extra') result_sets = ResultSetSerializer(many=True)
Add review status to serializer
Add review status to serializer
Python
agpl-3.0
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
--- +++ @@ -31,7 +31,7 @@ 'ip_address', 'num_turnout_reported', 'num_spoilt_ballots', # 'post_result', - 'user', 'user_id', + 'user', 'user_id', 'review_status', ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username')
2aa415cae1cb7ed0bb2b7fdaf51d9d5eaceaa768
sweettooth/extensions/admin.py
sweettooth/extensions/admin.py
from django.contrib import admin from extensions.models import Extension, ExtensionVersion from extensions.models import STATUS_ACTIVE, STATUS_REJECTED from review.models import CodeReview class CodeReviewAdmin(admin.TabularInline): model = CodeReview fields = 'reviewer', 'comments', class ExtensionVersionAdmin(admin.ModelAdmin): list_display = 'title', 'status', list_display_links = 'title', actions = 'approve', 'reject', def title(self, ver): return "%s (%d)" % (ver.extension.uuid, ver.version) title.short_description = "Extension (version)" inlines = [CodeReviewAdmin] def approve(self, request, queryset): queryset.update(status=STATUS_ACTIVE) def reject(self, request, queryset): queryset.update(status=STATUS_REJECTED) admin.site.register(ExtensionVersion, ExtensionVersionAdmin) class ExtensionVersionInline(admin.TabularInline): model = ExtensionVersion fields = 'version', 'status', extra = 0 class ExtensionAdmin(admin.ModelAdmin): list_display = 'name', 'uuid', 'num_versions', 'creator', list_display_links = 'name', 'uuid', search_fields = ('uuid', 'name') def num_versions(self, ext): return ext.versions.count() num_versions.short_description = "#V" inlines = [ExtensionVersionInline] admin.site.register(Extension, ExtensionAdmin)
from django.contrib import admin from extensions.models import Extension, ExtensionVersion from extensions.models import STATUS_ACTIVE, STATUS_REJECTED from review.models import CodeReview class CodeReviewAdmin(admin.TabularInline): model = CodeReview fields = 'reviewer', 'comments', class ExtensionVersionAdmin(admin.ModelAdmin): list_display = 'title', 'status', list_display_links = 'title', actions = 'approve', 'reject', def title(self, ver): return "%s (%d)" % (ver.extension.uuid, ver.version) title.short_description = "Extension (version)" inlines = [CodeReviewAdmin] def approve(self, request, queryset): queryset.update(status=STATUS_ACTIVE) def reject(self, request, queryset): queryset.update(status=STATUS_REJECTED) admin.site.register(ExtensionVersion, ExtensionVersionAdmin) class ExtensionVersionInline(admin.TabularInline): model = ExtensionVersion fields = 'version', 'status', extra = 0 class ExtensionAdmin(admin.ModelAdmin): list_display = 'name', 'uuid', 'num_versions', 'creator', list_display_links = 'name', 'uuid', search_fields = ('uuid', 'name') raw_id_fields = ('user',) def num_versions(self, ext): return ext.versions.count() num_versions.short_description = "#V" inlines = [ExtensionVersionInline] admin.site.register(Extension, ExtensionAdmin)
Make the user field into a raw field
extensions: Make the user field into a raw field It's a bit annoying having to navigate through a 20,000 line combobox.
Python
agpl-3.0
GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,GNOME/extensions-web,magcius/sweettooth,magcius/sweettooth
--- +++ @@ -37,6 +37,7 @@ list_display = 'name', 'uuid', 'num_versions', 'creator', list_display_links = 'name', 'uuid', search_fields = ('uuid', 'name') + raw_id_fields = ('user',) def num_versions(self, ext): return ext.versions.count()
f1e50c1caeeec5b8e443f634534bfed46f26dbdf
2017/async-socket-server/simple-client.py
2017/async-socket-server/simple-client.py
import sys, time import socket def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) sockobj.send(b'foo^1234$jo') sockobj.send(b'sdfsdfsdfsdf^a') sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') buf = b'' while True: buf += sockobj.recv(1024) print(buf) sockobj.close() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
import sys, time import socket import threading class ReadThread(threading.Thread): def __init__(self, sockobj): super().__init__() self.sockobj = sockobj self.bufsize = 8 * 1024 def run(self): while True: buf = self.sockobj.recv(self.bufsize) print('Received:', buf) if b'1111' in buf: break def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) rthread = ReadThread(sockobj) rthread.start() sockobj.send(b'foo^1234$jo') time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') time.sleep(1.0) sockobj.close() rthread.join() def main(): if len(sys.argv) <= 2: print("Error, expecting <host> <port>") sys.exit(1) host = sys.argv[1] port = int(sys.argv[2]) make_new_connection("foo", host, port) if __name__ == '__main__': main()
Modify client to read the socket concurrently
Modify client to read the socket concurrently
Python
unlicense
eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog
--- +++ @@ -1,21 +1,37 @@ import sys, time import socket +import threading + + +class ReadThread(threading.Thread): + def __init__(self, sockobj): + super().__init__() + self.sockobj = sockobj + self.bufsize = 8 * 1024 + + def run(self): + while True: + buf = self.sockobj.recv(self.bufsize) + print('Received:', buf) + if b'1111' in buf: + break def make_new_connection(name, host, port): sockobj = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sockobj.connect((host, port)) + rthread = ReadThread(sockobj) + rthread.start() sockobj.send(b'foo^1234$jo') + time.sleep(1.0) sockobj.send(b'sdfsdfsdfsdf^a') + time.sleep(1.0) sockobj.send(b'fkfkf0000$dfk^$sdf^a$^kk$') - - buf = b'' - while True: - buf += sockobj.recv(1024) - print(buf) + time.sleep(1.0) sockobj.close() + rthread.join() def main():
c17dc4a9876ac45b88307d2ab741655bae6c5dc7
inboxen/tests/settings.py
inboxen/tests/settings.py
from __future__ import absolute_import import os os.environ['INBOX_TESTING'] = '1' os.environ["INBOXEN_ADMIN_ACCESS"] = '1' from settings import * CACHES = { "default": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache" } } db = os.environ.get('DB') SECRET_KEY = "This is a test, you don't need secrets" if db == "sqlite": DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } elif db == "postgres": DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'inboxen', 'USER': 'postgres', }, } else: raise NotImplementedError("Please check tests/settings.py for valid DB values")
from __future__ import absolute_import import os os.environ['INBOX_TESTING'] = '1' os.environ["INBOXEN_ADMIN_ACCESS"] = '1' from settings import * CACHES = { "default": { "BACKEND": "django.core.cache.backends.locmem.LocMemCache" } } db = os.environ.get('DB') postgres_user = os.environ.get('PG_USER', 'postgres') SECRET_KEY = "This is a test, you don't need secrets" if db == "sqlite": DATABASES = { 'default': { 'ENGINE': 'django.db.backends.sqlite3', 'NAME': ':memory:', }, } elif db == "postgres": DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'inboxen', 'USER': postgres_user, }, } else: raise NotImplementedError("Please check tests/settings.py for valid DB values")
Allow setting of postgres user via an environment variable
Allow setting of postgres user via an environment variable Touch #73
Python
agpl-3.0
Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen,Inboxen/Inboxen
--- +++ @@ -12,6 +12,7 @@ } db = os.environ.get('DB') +postgres_user = os.environ.get('PG_USER', 'postgres') SECRET_KEY = "This is a test, you don't need secrets" @@ -27,7 +28,7 @@ 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'inboxen', - 'USER': 'postgres', + 'USER': postgres_user, }, } else:
aecc14ea11cae2bb27ee2534a229e7af8453053e
readthedocs/rtd_tests/tests/test_hacks.py
readthedocs/rtd_tests/tests/test_hacks.py
from django.test import TestCase from readthedocs.core import hacks class TestHacks(TestCase): fixtures = ['eric.json', 'test_data.json'] def setUp(self): hacks.patch_meta_path() def tearDown(self): hacks.unpatch_meta_path() def test_hack_failed_import(self): import boogy self.assertTrue(str(boogy), "<Silly Human, I'm not real>") def test_hack_correct_import(self): import itertools self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
from django.test import TestCase from core import hacks class TestHacks(TestCase): fixtures = ['eric.json', 'test_data.json'] def setUp(self): hacks.patch_meta_path() def tearDown(self): hacks.unpatch_meta_path() def test_hack_failed_import(self): import boogy self.assertTrue(str(boogy), "<Silly Human, I'm not real>") def test_hack_correct_import(self): import itertools self.assertFalse(str(itertools), "<Silly Human, I'm not real>")
Fix import to not include project.
Fix import to not include project.
Python
mit
agjohnson/readthedocs.org,pombredanne/readthedocs.org,wijerasa/readthedocs.org,atsuyim/readthedocs.org,CedarLogic/readthedocs.org,safwanrahman/readthedocs.org,nyergler/pythonslides,sunnyzwh/readthedocs.org,raven47git/readthedocs.org,CedarLogic/readthedocs.org,michaelmcandrew/readthedocs.org,fujita-shintaro/readthedocs.org,GovReady/readthedocs.org,singingwolfboy/readthedocs.org,kenwang76/readthedocs.org,agjohnson/readthedocs.org,clarkperkins/readthedocs.org,titiushko/readthedocs.org,kenshinthebattosai/readthedocs.org,VishvajitP/readthedocs.org,SteveViss/readthedocs.org,royalwang/readthedocs.org,KamranMackey/readthedocs.org,stevepiercy/readthedocs.org,CedarLogic/readthedocs.org,kdkeyser/readthedocs.org,kenwang76/readthedocs.org,gjtorikian/readthedocs.org,stevepiercy/readthedocs.org,KamranMackey/readthedocs.org,mhils/readthedocs.org,michaelmcandrew/readthedocs.org,kenshinthebattosai/readthedocs.org,laplaceliu/readthedocs.org,sils1297/readthedocs.org,rtfd/readthedocs.org,kenwang76/readthedocs.org,takluyver/readthedocs.org,cgourlay/readthedocs.org,wanghaven/readthedocs.org,sunnyzwh/readthedocs.org,sid-kap/readthedocs.org,laplaceliu/readthedocs.org,rtfd/readthedocs.org,espdev/readthedocs.org,sils1297/readthedocs.org,ojii/readthedocs.org,tddv/readthedocs.org,cgourlay/readthedocs.org,hach-que/readthedocs.org,Tazer/readthedocs.org,istresearch/readthedocs.org,cgourlay/readthedocs.org,techtonik/readthedocs.org,royalwang/readthedocs.org,Tazer/readthedocs.org,laplaceliu/readthedocs.org,kenshinthebattosai/readthedocs.org,davidfischer/readthedocs.org,davidfischer/readthedocs.org,sid-kap/readthedocs.org,soulshake/readthedocs.org,laplaceliu/readthedocs.org,nyergler/pythonslides,takluyver/readthedocs.org,kenwang76/readthedocs.org,mhils/readthedocs.org,clarkperkins/readthedocs.org,d0ugal/readthedocs.org,GovReady/readthedocs.org,tddv/readthedocs.org,wijerasa/readthedocs.org,stevepiercy/readthedocs.org,KamranMackey/readthedocs.org,alex/readthedocs.org,singingwolfboy/readthedocs.org,jerel/readthedocs.org,fujita-shintaro/readthedocs.org,pombredanne/readthedocs.org,royalwang/readthedocs.org,kenshinthebattosai/readthedocs.org,royalwang/readthedocs.org,dirn/readthedocs.org,espdev/readthedocs.org,rtfd/readthedocs.org,istresearch/readthedocs.org,emawind84/readthedocs.org,techtonik/readthedocs.org,emawind84/readthedocs.org,fujita-shintaro/readthedocs.org,SteveViss/readthedocs.org,sunnyzwh/readthedocs.org,espdev/readthedocs.org,LukasBoersma/readthedocs.org,hach-que/readthedocs.org,raven47git/readthedocs.org,Carreau/readthedocs.org,sunnyzwh/readthedocs.org,asampat3090/readthedocs.org,sils1297/readthedocs.org,dirn/readthedocs.org,titiushko/readthedocs.org,titiushko/readthedocs.org,wanghaven/readthedocs.org,alex/readthedocs.org,mhils/readthedocs.org,LukasBoersma/readthedocs.org,kdkeyser/readthedocs.org,stevepiercy/readthedocs.org,asampat3090/readthedocs.org,soulshake/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,dirn/readthedocs.org,atsuyim/readthedocs.org,alex/readthedocs.org,michaelmcandrew/readthedocs.org,d0ugal/readthedocs.org,d0ugal/readthedocs.org,wanghaven/readthedocs.org,pombredanne/readthedocs.org,emawind84/readthedocs.org,fujita-shintaro/readthedocs.org,raven47git/readthedocs.org,nyergler/pythonslides,Carreau/readthedocs.org,atsuyim/readthedocs.org,soulshake/readthedocs.org,sils1297/readthedocs.org,alex/readthedocs.org,espdev/readthedocs.org,Carreau/readthedocs.org,kdkeyser/readthedocs.org,wanghaven/readthedocs.org,jerel/readthedocs.org,clarkperkins/readthedocs.org,johncosta/private-readthedocs
.org,nyergler/pythonslides,ojii/readthedocs.org,nikolas/readthedocs.org,singingwolfboy/readthedocs.org,tddv/readthedocs.org,nikolas/readthedocs.org,wijerasa/readthedocs.org,VishvajitP/readthedocs.org,raven47git/readthedocs.org,istresearch/readthedocs.org,GovReady/readthedocs.org,johncosta/private-readthedocs.org,sid-kap/readthedocs.org,clarkperkins/readthedocs.org,mrshoki/readthedocs.org,Carreau/readthedocs.org,d0ugal/readthedocs.org,techtonik/readthedocs.org,ojii/readthedocs.org,safwanrahman/readthedocs.org,ojii/readthedocs.org,asampat3090/readthedocs.org,mrshoki/readthedocs.org,attakei/readthedocs-oauth,SteveViss/readthedocs.org,mrshoki/readthedocs.org,nikolas/readthedocs.org,techtonik/readthedocs.org,michaelmcandrew/readthedocs.org,cgourlay/readthedocs.org,jerel/readthedocs.org,davidfischer/readthedocs.org,VishvajitP/readthedocs.org,kdkeyser/readthedocs.org,hach-que/readthedocs.org,GovReady/readthedocs.org,attakei/readthedocs-oauth,dirn/readthedocs.org,CedarLogic/readthedocs.org,sid-kap/readthedocs.org,KamranMackey/readthedocs.org,jerel/readthedocs.org,asampat3090/readthedocs.org,johncosta/private-readthedocs.org,gjtorikian/readthedocs.org,Tazer/readthedocs.org,safwanrahman/readthedocs.org,singingwolfboy/readthedocs.org,soulshake/readthedocs.org,attakei/readthedocs-oauth,gjtorikian/readthedocs.org,wijerasa/readthedocs.org,takluyver/readthedocs.org,davidfischer/readthedocs.org,LukasBoersma/readthedocs.org,LukasBoersma/readthedocs.org,agjohnson/readthedocs.org,safwanrahman/readthedocs.org,emawind84/readthedocs.org,titiushko/readthedocs.org,Tazer/readthedocs.org,mrshoki/readthedocs.org,atsuyim/readthedocs.org,hach-que/readthedocs.org,istresearch/readthedocs.org,agjohnson/readthedocs.org,VishvajitP/readthedocs.org,SteveViss/readthedocs.org,takluyver/readthedocs.org,nikolas/readthedocs.org,rtfd/readthedocs.org,gjtorikian/readthedocs.org,mhils/readthedocs.org
--- +++ @@ -1,5 +1,5 @@ from django.test import TestCase -from readthedocs.core import hacks +from core import hacks class TestHacks(TestCase): fixtures = ['eric.json', 'test_data.json']
ada4e94fb4b6de1303d4c4ad47d239bbf0699f3e
dev_settings.py
dev_settings.py
""" This is a home for shared dev settings. Feel free to add anything that all devs should have set. Add `from dev_settings import *` to the top of your localsettings file to use. You can then override or append to any of these settings there. """ import os LOCAL_APPS = ( 'django_extensions', ) ####### Django Extensions ####### # These things will be imported when you run ./manage.py shell_plus SHELL_PLUS_POST_IMPORTS = ( # Models ('corehq.apps.domain.models', 'Domain'), ('corehq.apps.groups.models', 'Group'), ('corehq.apps.locations.models', 'Location'), ('corehq.apps.users.models', ('CommCareUser', 'CommCareCase')), ('couchforms.models', 'XFormInstance'), # Data querying utils ('dimagi.utils.couch.database', 'get_db'), ('corehq.apps.sofabed.models', ('FormData', 'CaseData')), ('corehq.apps.es', '*'), ) ALLOWED_HOSTS = ['*'] FIX_LOGGER_ERROR_OBFUSCATION = True DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'commcarehq', 'USER': 'commcarehq', 'PASSWORD': 'commcarehq', 'HOST': 'localhost', 'PORT': '5432' } } BOWER_PATH = os.popen('which bower').read().strip()
""" This is a home for shared dev settings. Feel free to add anything that all devs should have set. Add `from dev_settings import *` to the top of your localsettings file to use. You can then override or append to any of these settings there. """ import os LOCAL_APPS = ( 'django_extensions', ) ####### Django Extensions ####### # These things will be imported when you run ./manage.py shell_plus SHELL_PLUS_POST_IMPORTS = ( # Models ('corehq.apps.domain.models', 'Domain'), ('corehq.apps.groups.models', 'Group'), ('corehq.apps.locations.models', 'Location'), ('corehq.apps.users.models', ('CommCareUser', 'CommCareCase')), ('couchforms.models', 'XFormInstance'), # Data querying utils ('dimagi.utils.couch.database', 'get_db'), ('corehq.apps.sofabed.models', ('FormData', 'CaseData')), ('corehq.apps.es', '*'), ) ALLOWED_HOSTS = ['*'] FIX_LOGGER_ERROR_OBFUSCATION = True DATABASES = { 'default': { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'commcarehq', 'USER': 'commcarehq', 'PASSWORD': 'commcarehq', 'HOST': 'localhost', 'PORT': '5432' } } BOWER_PATH = os.popen('which bower').read().strip() CACHES = {'default': {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}}
Add dummy cache setting so code can be loaded
Add dummy cache setting so code can be loaded I mimic what will happen on ReadTheDocs locally by doing the following: * Don't start my hq environment (no couch, pillowtop, redis, etc) * Enter my hq virtualenv * Move or rename `localsettings.py` so it won't be found * `$ cd docs/` * `$ make html` Basically it needs to be able to import stuff without having a localsettings (I just put the essentials in `dev_settings.py`) or db connection. In the past we've had some singleton-type objects which are initialized on import and require a db connection to init. These need to either be lazily initialized (you can then memoize it to keep it a singleton), or mocked out in `docs/conf.py`.
Python
bsd-3-clause
qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq
--- +++ @@ -42,3 +42,5 @@ } BOWER_PATH = os.popen('which bower').read().strip() + +CACHES = {'default': {'BACKEND': 'django.core.cache.backends.dummy.DummyCache'}}
09f1a21fd3a59e31468470e0f5de7eec7c8f3507
ynr/apps/popolo/serializers.py
ynr/apps/popolo/serializers.py
from rest_framework import serializers from popolo import models as popolo_models from parties.serializers import MinimalPartySerializer class MinimalPostSerializer(serializers.ModelSerializer): class Meta: model = popolo_models.Post fields = ("id", "label", "slug") id = serializers.ReadOnlyField(source="slug") label = serializers.ReadOnlyField() class NominationAndResultSerializer(serializers.HyperlinkedModelSerializer): """ A representation of a Membership with only the information on the ballot paper, and results if we have them. """ class Meta: model = popolo_models.Membership fields = ("elected", "party_list_position", "person", "party") elected = serializers.ReadOnlyField() party_list_position = serializers.ReadOnlyField() person = serializers.ReadOnlyField(source="person.name") party = MinimalPartySerializer(read_only=True)
from rest_framework import serializers from popolo import models as popolo_models from parties.serializers import MinimalPartySerializer class MinimalPostSerializer(serializers.ModelSerializer): class Meta: model = popolo_models.Post fields = ("id", "label", "slug") id = serializers.ReadOnlyField(source="slug") label = serializers.ReadOnlyField() class NominationAndResultSerializer(serializers.HyperlinkedModelSerializer): """ A representation of a Membership with only the information on the ballot paper, and results if we have them. """ class Meta: model = popolo_models.Membership fields = ("id", "elected", "party_list_position", "name", "party") elected = serializers.ReadOnlyField() party_list_position = serializers.ReadOnlyField() name = serializers.ReadOnlyField(source="person.name") party = MinimalPartySerializer(read_only=True)
Remove membership internal ID and change person to name
Remove membership internal ID and change person to name
Python
agpl-3.0
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
--- +++ @@ -22,9 +22,9 @@ class Meta: model = popolo_models.Membership - fields = ("elected", "party_list_position", "person", "party") + fields = ("id", "elected", "party_list_position", "name", "party") elected = serializers.ReadOnlyField() party_list_position = serializers.ReadOnlyField() - person = serializers.ReadOnlyField(source="person.name") + name = serializers.ReadOnlyField(source="person.name") party = MinimalPartySerializer(read_only=True)
de7854ddf9577e9cd14a630503ce514d19a0a235
demo/app/launch.py
demo/app/launch.py
def main(): from psi.experiment.workbench import PSIWorkbench workbench = PSIWorkbench() io_manifest = 'io_manifest.IOManifest' controller_manifest = 'simple_experiment.ControllerManifest' workbench.register_core_plugins(io_manifest, controller_manifest) workbench.start_workspace('demo') if __name__ == '__main__': main()
def main(): import logging logging.basicConfig(level='DEBUG') from psi.experiment.workbench import PSIWorkbench workbench = PSIWorkbench() io_manifest = 'io_manifest.IOManifest' controller_manifest = 'simple_experiment.ControllerManifest' workbench.register_core_plugins(io_manifest, controller_manifest) workbench.start_workspace('demo') if __name__ == '__main__': main()
Add debugging output to app demo
Add debugging output to app demo
Python
mit
bburan/psiexperiment
--- +++ @@ -1,4 +1,7 @@ def main(): + import logging + logging.basicConfig(level='DEBUG') + from psi.experiment.workbench import PSIWorkbench workbench = PSIWorkbench()
b1106407aa9695d0ca007b53af593e25e9bb1769
saleor/plugins/migrations/0004_drop_support_for_env_vatlayer_access_key.py
saleor/plugins/migrations/0004_drop_support_for_env_vatlayer_access_key.py
from django.db import migrations def assign_access_key(apps, schema): vatlayer_configuration = ( apps.get_model("plugins", "PluginConfiguration") .objects.filter(identifier="mirumee.taxes.vatlayer") .first() ) if vatlayer_configuration: vatlayer_configuration.active = False vatlayer_configuration.save() class Migration(migrations.Migration): dependencies = [ ("plugins", "0003_auto_20200429_0142"), ] operations = [ migrations.RunPython(assign_access_key), ]
from django.db import migrations def deactivate_vatlayer(apps, schema): vatlayer_configuration = ( apps.get_model("plugins", "PluginConfiguration") .objects.filter(identifier="mirumee.taxes.vatlayer") .first() ) if vatlayer_configuration: vatlayer_configuration.active = False vatlayer_configuration.save() class Migration(migrations.Migration): dependencies = [ ("plugins", "0003_auto_20200429_0142"), ] operations = [ migrations.RunPython(deactivate_vatlayer), ]
Change migration name to more proper
Change migration name to more proper
Python
bsd-3-clause
mociepka/saleor,mociepka/saleor,mociepka/saleor
--- +++ @@ -1,7 +1,7 @@ from django.db import migrations -def assign_access_key(apps, schema): +def deactivate_vatlayer(apps, schema): vatlayer_configuration = ( apps.get_model("plugins", "PluginConfiguration") .objects.filter(identifier="mirumee.taxes.vatlayer") @@ -20,5 +20,5 @@ ] operations = [ - migrations.RunPython(assign_access_key), + migrations.RunPython(deactivate_vatlayer), ]
e0c926667a32031b5d43ec1701fe7577282176ca
rest_flex_fields/utils.py
rest_flex_fields/utils.py
def is_expanded(request, key): """ Examines request object to return boolean of whether passed field is expanded. """ expand = request.query_params.get("expand", "") expand_fields = [] for e in expand.split(","): expand_fields.extend([e for e in e.split(".")]) return "~all" in expand_fields or key in expand_fields def split_levels(fields): """ Convert dot-notation such as ['a', 'a.b', 'a.d', 'c'] into current-level fields ['a', 'c'] and next-level fields {'a': ['b', 'd']}. """ first_level_fields = [] next_level_fields = {} if not fields: return first_level_fields, next_level_fields if not isinstance(fields, list): fields = [a.strip() for a in fields.split(",") if a.strip()] for e in fields: if "." in e: first_level, next_level = e.split(".", 1) first_level_fields.append(first_level) next_level_fields.setdefault(first_level, []).append(next_level) else: first_level_fields.append(e) first_level_fields = list(set(first_level_fields)) return first_level_fields, next_level_fields
try: # Python 3 from collections.abc import Iterable string_types = (str,) except ImportError: # Python 2 from collections import Iterable string_types = (str, unicode) def is_expanded(request, key): """ Examines request object to return boolean of whether passed field is expanded. """ expand = request.query_params.get("expand", "") expand_fields = [] for e in expand.split(","): expand_fields.extend([e for e in e.split(".")]) return "~all" in expand_fields or key in expand_fields def split_levels(fields): """ Convert dot-notation such as ['a', 'a.b', 'a.d', 'c'] into current-level fields ['a', 'c'] and next-level fields {'a': ['b', 'd']}. """ first_level_fields = [] next_level_fields = {} if not fields: return first_level_fields, next_level_fields assert ( isinstance(fields, Iterable) ), "`fields` must be iterable (e.g. list, tuple, or generator)" if isinstance(fields, string_types): fields = [a.strip() for a in fields.split(",") if a.strip()] for e in fields: if "." in e: first_level, next_level = e.split(".", 1) first_level_fields.append(first_level) next_level_fields.setdefault(first_level, []).append(next_level) else: first_level_fields.append(e) first_level_fields = list(set(first_level_fields)) return first_level_fields, next_level_fields
Handle other iterable types gracefully in split_level utility function
Handle other iterable types gracefully in split_level utility function
Python
mit
rsinger86/drf-flex-fields
--- +++ @@ -1,3 +1,13 @@ +try: + # Python 3 + from collections.abc import Iterable + string_types = (str,) +except ImportError: + # Python 2 + from collections import Iterable + string_types = (str, unicode) + + def is_expanded(request, key): """ Examines request object to return boolean of whether passed field is expanded. @@ -23,7 +33,11 @@ if not fields: return first_level_fields, next_level_fields - if not isinstance(fields, list): + assert ( + isinstance(fields, Iterable) + ), "`fields` must be iterable (e.g. list, tuple, or generator)" + + if isinstance(fields, string_types): fields = [a.strip() for a in fields.split(",") if a.strip()] for e in fields: if "." in e:
defb736895d5f58133b9632c85d8064669ee897a
blueLed.py
blueLed.py
''' Dr Who Box: Blue Effects LED ''' from __future__ import print_function import RPi.GPIO as GPIO import time from multiprocessing import Process import math # Define PINS LED = 18 # Use numbering based on P1 header GPIO.setmode(GPIO.BOARD) GPIO.setwarnings(False) GPIO.setup(LED, GPIO.OUT, GPIO.LOW) def pulsateLed(): pwm = GPIO.PWM(LED, 100) pwm.start(0) values = [math.sin(x*math.pi/180.0) for x in range (0,181)] values = [int(100*x**3) for x in values] increasing = True count = 0 delay = 0.02 pwm.start(0) while True: pwm.ChangeDutyCycle(values[count]) time.sleep(delay) if increasing: count += 1 else: count -= 1 if (count >= len(values)-1): increasing = False if (count <= 0): increasing = True # Wait forever... try: p = Process(target=pulsateLed) p.start() while True: time.sleep(1) print(time.asctime(),'and python is running!') except: GPIO.cleanup() p.terminate()
''' Dr Who Box: Blue Effects LED ''' from __future__ import print_function import RPi.GPIO as GPIO import time from multiprocessing import Process import math # Define PINS LED = 18 # Use numbering based on P1 header GPIO.setmode(GPIO.BOARD) GPIO.setwarnings(False) GPIO.setup(LED, GPIO.OUT, GPIO.LOW) def pulsateLed(): pwm = GPIO.PWM(LED, 100) pwm.start(0) values = [math.sin(x * math.pi / 180.0) for x in range(0, 181)] values = [int(100 * x ** 3) for x in values] increasing = True count = 0 delay = 0.02 pwm.start(0) while True: pwm.ChangeDutyCycle(values[count]) if increasing: count += 1 else: count -= 1 if (count >= len(values) - 1): increasing = False elif (count <= 0): increasing = True time.sleep(delay) # Loop forever... try: p = Process(target=pulsateLed) p.start() while True: time.sleep(1) print(time.asctime(), 'and python is running!') except: GPIO.cleanup() p.terminate()
Tidy up and apply PEP8 guidelines.
Tidy up and apply PEP8 guidelines.
Python
mit
davidb24v/drwho
--- +++ @@ -22,36 +22,36 @@ def pulsateLed(): pwm = GPIO.PWM(LED, 100) pwm.start(0) - values = [math.sin(x*math.pi/180.0) for x in range (0,181)] - values = [int(100*x**3) for x in values] + values = [math.sin(x * math.pi / 180.0) for x in range(0, 181)] + values = [int(100 * x ** 3) for x in values] increasing = True count = 0 delay = 0.02 pwm.start(0) - + while True: pwm.ChangeDutyCycle(values[count]) - - time.sleep(delay) if increasing: count += 1 else: count -= 1 - - if (count >= len(values)-1): + + if (count >= len(values) - 1): increasing = False - - if (count <= 0): + elif (count <= 0): increasing = True - -# Wait forever... + + time.sleep(delay) + + +# Loop forever... try: p = Process(target=pulsateLed) p.start() while True: time.sleep(1) - print(time.asctime(),'and python is running!') + print(time.asctime(), 'and python is running!') except: GPIO.cleanup() p.terminate()
e74aff778d6657a8c4993c62f264008c9be99e78
api/app.py
api/app.py
# TODO: Add private API with Redis Cache and PostgreSQL (or any SQL DB with SQLAlchemy) from api import api, cache, db from flask import abort, Flask from flask_restful import Resource from os import getenv from api.resources.market import Data from api.resources.trend import Predict def setup_app(): db_uri = getenv('SQLALCHEMY_DATABASE_URI') # format: postgresql://user:pw@host:port/db if not db_uri: abort(401) app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = db_uri app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False with app.app_context(): api.init_app(app) return app def setup_db(application, sqlalchemy_bind, mem_cache=None): with application.app_context(): if mem_cache is not None: mem_cache.init_app(app) sqlalchemy_bind.init_app(app) sqlalchemy_bind.create_all() if __name__=='__main__': app = setup_app() from api.models import * # Load all DB models setup_db(app, db, mem_cache=cache) app.run(debug=True)
# TODO: Add private API with Redis Cache and PostgreSQL (or any SQL DB with SQLAlchemy) from api import api, cache, ENABLE_DB, db from flask import abort, Flask from flask_restful import Resource from os import getenv from api.resources.market import Data from api.resources.trend import Predict def setup_app(): app = Flask(__name__) if ENABLE_DB: db_uri = getenv('SQLALCHEMY_DATABASE_URI') # format: postgresql://user:pw@host:port/db if db_uri: app.config['SQLALCHEMY_DATABASE_URI'] = db_uri else: abort(401) app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False with app.app_context(): api.init_app(app) return app def setup_db(application, sqlalchemy_bind, mem_cache=None): with application.app_context(): if mem_cache is not None: mem_cache.init_app(app) sqlalchemy_bind.init_app(app) sqlalchemy_bind.create_all() if __name__=='__main__': app = setup_app() if ENABLE_DB: from api.models import * # Load all DB models setup_db(app, db, mem_cache=cache) app.run(debug=True)
Fix validate DB when DB is disabled and not connected
Fix validate DB when DB is disabled and not connected
Python
mit
amicks/Speculator
--- +++ @@ -1,5 +1,5 @@ # TODO: Add private API with Redis Cache and PostgreSQL (or any SQL DB with SQLAlchemy) -from api import api, cache, db +from api import api, cache, ENABLE_DB, db from flask import abort, Flask from flask_restful import Resource from os import getenv @@ -7,12 +7,15 @@ from api.resources.trend import Predict def setup_app(): - db_uri = getenv('SQLALCHEMY_DATABASE_URI') # format: postgresql://user:pw@host:port/db - if not db_uri: - abort(401) + app = Flask(__name__) - app = Flask(__name__) - app.config['SQLALCHEMY_DATABASE_URI'] = db_uri + if ENABLE_DB: + db_uri = getenv('SQLALCHEMY_DATABASE_URI') # format: postgresql://user:pw@host:port/db + if db_uri: + app.config['SQLALCHEMY_DATABASE_URI'] = db_uri + else: + abort(401) + app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False with app.app_context(): api.init_app(app) @@ -29,7 +32,8 @@ if __name__=='__main__': app = setup_app() - from api.models import * # Load all DB models - setup_db(app, db, mem_cache=cache) + if ENABLE_DB: + from api.models import * # Load all DB models + setup_db(app, db, mem_cache=cache) app.run(debug=True)
b68e609b746af6211a85493246242ba00a26f306
bin/hand_test_lib_main.py
bin/hand_test_lib_main.py
#!/usr/bin/env python import csv import sys from gwaith import get_rates, processors only = ('PLN', 'GBP') for data in get_rates(processor=processors.to_json, only=only): print(data) for data in get_rates(processor=processors.raw, only=only): print(data) for data in get_rates(processor=processors.raw_python, only=only): print(data) for data in get_rates(processor=processors.raw_python, only=only): print(data) writer = csv.writer(sys.stdout) writer.writerow(['Currency', 'Date', 'Rate']) for currency in get_rates( processor=processors.to_csv, only=only, writer=writer): # silly, isn't it, the to_csv might need rethinking, I guess pass
#!/usr/bin/env python import csv import sys from gwaith import get_rates, processors only = ('PLN', 'GBP') def header(msg): print('=' * 80 + '\r\t\t\t ' + msg + ' ') header('to_json') for data in get_rates(processor=processors.to_json, only=only): print(data) header('raw') for data in get_rates(processor=processors.raw, only=only): print(data) header('raw_python') for data in get_rates(processor=processors.raw_python, only=only): print(data) header('to_csv') writer = csv.writer(sys.stdout) writer.writerow(['Currency', 'Date', 'Rate']) for currency in get_rates( processor=processors.to_csv, only=only, writer=writer): # silly, isn't it, the to_csv might need rethinking, I guess pass
Improve output of the manual testing command adding headers
Improve output of the manual testing command adding headers
Python
mit
bartekbrak/gwaith,bartekbrak/gwaith,bartekbrak/gwaith
--- +++ @@ -6,17 +6,24 @@ only = ('PLN', 'GBP') + +def header(msg): + print('=' * 80 + '\r\t\t\t ' + msg + ' ') + + +header('to_json') for data in get_rates(processor=processors.to_json, only=only): print(data) +header('raw') for data in get_rates(processor=processors.raw, only=only): print(data) +header('raw_python') for data in get_rates(processor=processors.raw_python, only=only): print(data) -for data in get_rates(processor=processors.raw_python, only=only): - print(data) +header('to_csv') writer = csv.writer(sys.stdout) writer.writerow(['Currency', 'Date', 'Rate']) for currency in get_rates(
0498e1575f59880b4f7667f0d99bfbd993f2fcd5
profiles/backends.py
profiles/backends.py
from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend class CaseInsensitiveModelBackend(ModelBackend): def authenticate(email=None, password=None, **kwargs): """ Created by LNguyen( Date: 14Dec2017 Description: Method to handle backend authentication for case insensitive usernames If the given credentials are valid, return a User object. """ UserModel = get_user_model() if email is None: email=kwargs.get(UserModel.email) try: user=UserModel.objects.get(email__iexact=email) user.backend = 'profiles.backends.CaseInsensitiveModelBackend' if user.check_password(password): return user except UserModel.DoesNotExist: # This backend doesn't accept these credentials as arguments. Try the next one. UserModel().set_password(password)
from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend class CaseInsensitiveModelBackend(ModelBackend): def authenticate(email=None, password=None, **kwargs): """ Created by LNguyen( Date: 14Dec2017 Description: Method to handle backend authentication for case insensitive usernames If the given credentials are valid, return a User object. """ UserModel = get_user_model() if email is None: email=kwargs.get(UserModel.email) try: if (type(email)==str): user=UserModel.objects.get(email__iexact=email) else: user=UserModel.objects.get(email__exact=email) user.backend = 'profiles.backends.CaseInsensitiveModelBackend' if user.check_password(password): return user except UserModel.DoesNotExist: # This backend doesn't accept these credentials as arguments. Try the next one. UserModel().set_password(password)
Fix issues with changing passwords
Fix issues with changing passwords
Python
mit
gdit-cnd/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,gdit-cnd/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID,LindaTNguyen/RAPID
--- +++ @@ -17,7 +17,10 @@ email=kwargs.get(UserModel.email) try: - user=UserModel.objects.get(email__iexact=email) + if (type(email)==str): + user=UserModel.objects.get(email__iexact=email) + else: + user=UserModel.objects.get(email__exact=email) user.backend = 'profiles.backends.CaseInsensitiveModelBackend' if user.check_password(password): return user
fc830b0caf29fe1424bc8fe30afcf7e21d8ecd72
inbound.py
inbound.py
import logging, email, yaml from django.utils import simplejson as json from google.appengine.ext import webapp, deferred from google.appengine.ext.webapp.mail_handlers import InboundMailHandler from google.appengine.api.urlfetch import fetch settings = yaml.load(open('settings.yaml')) def callback(raw): result = {'email': {'raw': raw}} fetch(settings['outbound_url'], payload=json.dumps(result), method="POST", headers={ 'Authorization': settings['api_key'], 'Content-Type': 'application/json' } ) class InboundHandler(InboundMailHandler): def receive(self, message): logging.info("Received a message from: " + message.sender) deferred.defer(callback, message.original.as_string(True), _queue='inbound')
import logging, email, yaml from django.utils import simplejson as json from google.appengine.ext import webapp, deferred from google.appengine.ext.webapp.mail_handlers import InboundMailHandler from google.appengine.api.urlfetch import fetch from google.appengine.api.urlfetch import Error as FetchError settings = yaml.load(open('settings.yaml')) def callback(raw): result = {'email': {'raw': raw}} response = fetch(settings['outbound_url'], payload=json.dumps(result), method="POST", headers={ 'Authorization': settings['api_key'], 'Content-Type': 'application/json' }, deadline=10 ) logging.info(response.status_code) if response.status_code != 200: raise FetchError() class InboundHandler(InboundMailHandler): def receive(self, message): logging.info("Received a message from: " + message.sender) deferred.defer(callback, message.original.as_string(True), _queue='inbound')
Raise if response is not 200
Raise if response is not 200
Python
mit
maccman/remail-engine
--- +++ @@ -3,20 +3,25 @@ from google.appengine.ext import webapp, deferred from google.appengine.ext.webapp.mail_handlers import InboundMailHandler from google.appengine.api.urlfetch import fetch +from google.appengine.api.urlfetch import Error as FetchError settings = yaml.load(open('settings.yaml')) def callback(raw): result = {'email': {'raw': raw}} - fetch(settings['outbound_url'], - payload=json.dumps(result), - method="POST", - headers={ - 'Authorization': settings['api_key'], - 'Content-Type': 'application/json' - } - ) + response = fetch(settings['outbound_url'], + payload=json.dumps(result), + method="POST", + headers={ + 'Authorization': settings['api_key'], + 'Content-Type': 'application/json' + }, + deadline=10 + ) + logging.info(response.status_code) + if response.status_code != 200: + raise FetchError() class InboundHandler(InboundMailHandler): def receive(self, message):
47ea7ebce827727bef5ad49e5df84fa0e5f6e4b9
pycloudflare/services.py
pycloudflare/services.py
from itertools import count from demands import HTTPServiceClient from yoconfig import get_config class CloudFlareService(HTTPServiceClient): def __init__(self, **kwargs): config = get_config('cloudflare') headers = { 'Content-Type': 'application/json', 'X-Auth-Key': config['api_key'], 'X-Auth-Email': config['email'] } super(CloudFlareService, self).__init__(config['url'], headers=headers) def get_zones(self): zones = [] for page in count(): batch = self.get( 'zones?page=%s&per_page=50' % page).json()['result'] if batch: zones.extend(batch) else: break return zones def get_zone(self, zone_id): return self.get('zones/%s' % zone_id).json()['result']
from itertools import count from demands import HTTPServiceClient from yoconfig import get_config class CloudFlareService(HTTPServiceClient): def __init__(self, **kwargs): config = get_config('cloudflare') headers = { 'Content-Type': 'application/json', 'X-Auth-Key': config['api_key'], 'X-Auth-Email': config['email'] } super(CloudFlareService, self).__init__(config['url'], headers=headers) def iter_zones(self): for page in count(): batch = self.get('zones?page=%i&per_page=50' % page).json()['result'] if not batch: return for result in batch: yield result def get_zones(self): return list(self.iter_zones()) def get_zone(self, zone_id): return self.get('zones/%s' % zone_id).json()['result']
Use an iterator to get pages
Use an iterator to get pages
Python
mit
gnowxilef/pycloudflare,yola/pycloudflare
--- +++ @@ -14,16 +14,16 @@ } super(CloudFlareService, self).__init__(config['url'], headers=headers) + def iter_zones(self): + for page in count(): + batch = self.get('zones?page=%i&per_page=50' % page).json()['result'] + if not batch: + return + for result in batch: + yield result + def get_zones(self): - zones = [] - for page in count(): - batch = self.get( - 'zones?page=%s&per_page=50' % page).json()['result'] - if batch: - zones.extend(batch) - else: - break - return zones + return list(self.iter_zones()) def get_zone(self, zone_id): return self.get('zones/%s' % zone_id).json()['result']
c496be720461722ce482c981b4915365dd0df8ab
events/views.py
events/views.py
from django.contrib import messages from django.contrib.auth.decorators import login_required from django.views.generic.list import ListView from django.views.generic.detail import DetailView from base.util import class_view_decorator from base.views import RedirectBackView from .models import Event, EventUserRegistration class EventListView(ListView): model = Event context_object_name = 'events' class EventDetailView(DetailView): model = Event context_object_name = 'event' @class_view_decorator(login_required) class EventUserRegisterView(RedirectBackView): default_return_view = 'events_event_list' def dispatch(self, request, *args, **kwargs): event = Event.objects.get(pk=kwargs['event_id']) if event.registration_open(): registration = EventUserRegistration(user=request.user, event=event) registration.save() message = 'Successfully registered to the %s' % event messages.add_message(request, messages.INFO, message) else: message = 'Registration to the %s is not open.' % event messages.add_message(request, messages.ERROR, message) return super(EventUserRegisterView, self).dispatch(request, *args, **kwargs)
from django.contrib import messages from django.contrib.auth.decorators import login_required from django.utils.translation import ugettext_lazy as _ from django.views.generic.list import ListView from django.views.generic.detail import DetailView from base.util import class_view_decorator from base.views import RedirectBackView from .models import Event, EventUserRegistration class EventListView(ListView): model = Event context_object_name = 'events' class EventDetailView(DetailView): model = Event context_object_name = 'event' @class_view_decorator(login_required) class EventUserRegisterView(RedirectBackView): default_return_view = 'events_event_list' def dispatch(self, request, *args, **kwargs): event = Event.objects.get(pk=kwargs['event_id']) # Check if user is not already registered registrations = EventUserRegistration.objects.filter( user=request.user, event=event).count() if registrations: message = _('You are already registered to the %s') % event messages.add_message(request, messages.ERROR, message) return super(EventUserRegisterView, self).dispatch(request, *args, **kwargs) if event.registration_open(): registration = EventUserRegistration(user=request.user, event=event) registration.save() message = _('Successfully registered to the %s') % event messages.add_message(request, messages.INFO, message) else: message = _('Registration to the %s is not open.') % event messages.add_message(request, messages.ERROR, message) return super(EventUserRegisterView, self).dispatch(request, *args, **kwargs)
Raise error when user is registering to the event multiple times
events: Raise error when user is registering to the event multiple times
Python
mit
matus-stehlik/roots,rtrembecky/roots,tbabej/roots,rtrembecky/roots,matus-stehlik/roots,rtrembecky/roots,tbabej/roots,tbabej/roots,matus-stehlik/roots
--- +++ @@ -1,5 +1,6 @@ from django.contrib import messages from django.contrib.auth.decorators import login_required +from django.utils.translation import ugettext_lazy as _ from django.views.generic.list import ListView from django.views.generic.detail import DetailView @@ -29,14 +30,26 @@ def dispatch(self, request, *args, **kwargs): event = Event.objects.get(pk=kwargs['event_id']) + # Check if user is not already registered + registrations = EventUserRegistration.objects.filter( + user=request.user, + event=event).count() + + if registrations: + message = _('You are already registered to the %s') % event + messages.add_message(request, messages.ERROR, message) + return super(EventUserRegisterView, self).dispatch(request, + *args, + **kwargs) + if event.registration_open(): registration = EventUserRegistration(user=request.user, event=event) registration.save() - message = 'Successfully registered to the %s' % event + message = _('Successfully registered to the %s') % event messages.add_message(request, messages.INFO, message) else: - message = 'Registration to the %s is not open.' % event + message = _('Registration to the %s is not open.') % event messages.add_message(request, messages.ERROR, message) return super(EventUserRegisterView, self).dispatch(request,
3fe4f1788d82719eac70ffe0fbbbae4dbe85f00b
evexml/forms.py
evexml/forms.py
from django import forms from django.forms.fields import IntegerField, CharField import evelink.account class AddAPIForm(forms.Form): key_id = IntegerField() v_code = CharField(max_length=64, min_length=1) def clean(self): self._clean() return super(AddAPIForm, self).clean() def _clean(self): """Check the access mask and characters of the supplied keypair. """ key_id = self.cleaned_data.get('key_id') v_code = self.cleaned_data.get('v_code') if key_id and v_code: api = evelink.api.API(api_key=(key_id, v_code)) account = evelink.account.Account(api) try: key_info = account.key_info().result except evelink.api.APIError as error: self.add_error(None, error.message) return if key_info['type'] != 'account': self.add_error(None, 'The API key should select Character: All')
from django import forms from django.forms.fields import IntegerField, CharField import evelink.account class AddAPIForm(forms.Form): key_id = IntegerField() v_code = CharField(max_length=64, min_length=1) def clean(self): self._clean() return super(AddAPIForm, self).clean() def _clean(self): """Check the access mask and characters of the supplied keypair. """ key_id = self.cleaned_data.get('key_id') v_code = self.cleaned_data.get('v_code') if not (key_id and v_code): return api = evelink.api.API(api_key=(key_id, v_code)) account = evelink.account.Account(api) try: key_info = account.key_info().result except evelink.api.APIError as error: self.add_error(None, error.message) return if key_info['type'] != 'account': self.add_error(None, 'The API key should select Character: All') if key_info['access_mask'] != 4294967295: self.add_error(None, 'The API key should have full access') if key_info['expire_ts']: self.add_error(None, 'The API key should have no expiry checked')
Implement checks to pass tests
Implement checks to pass tests
Python
mit
randomic/aniauth-tdd,randomic/aniauth-tdd
--- +++ @@ -18,14 +18,20 @@ """ key_id = self.cleaned_data.get('key_id') v_code = self.cleaned_data.get('v_code') - if key_id and v_code: - api = evelink.api.API(api_key=(key_id, v_code)) - account = evelink.account.Account(api) - try: - key_info = account.key_info().result - except evelink.api.APIError as error: - self.add_error(None, error.message) - return + if not (key_id and v_code): + return - if key_info['type'] != 'account': - self.add_error(None, 'The API key should select Character: All') + api = evelink.api.API(api_key=(key_id, v_code)) + account = evelink.account.Account(api) + try: + key_info = account.key_info().result + except evelink.api.APIError as error: + self.add_error(None, error.message) + return + + if key_info['type'] != 'account': + self.add_error(None, 'The API key should select Character: All') + if key_info['access_mask'] != 4294967295: + self.add_error(None, 'The API key should have full access') + if key_info['expire_ts']: + self.add_error(None, 'The API key should have no expiry checked')
f31ab02d9a409e31acf339db2b950216472b8e9e
salesforce/backend/operations.py
salesforce/backend/operations.py
# django-salesforce # # by Phil Christensen # (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org) # See LICENSE.md for details # import re from django.db.backends import BaseDatabaseOperations """ Default database operations, with unquoted names. """ class DatabaseOperations(BaseDatabaseOperations): compiler_module = "salesforce.backend.compiler" def __init__(self, connection): # not calling superclass constructor to maintain Django 1.3 support self.connection = connection self._cache = None def connection_init(self): pass def sql_flush(self, style, tables, sequences): return [] def quote_name(self, name): return name def value_to_db_datetime(self, value): """ We let the JSON serializer handle dates for us. """ return value def last_insert_id(self, cursor, db_table, db_column): return cursor.lastrowid
# django-salesforce # # by Phil Christensen # (c) 2012-2013 Freelancers Union (http://www.freelancersunion.org) # See LICENSE.md for details # import re from django.db.backends import BaseDatabaseOperations """ Default database operations, with unquoted names. """ class DatabaseOperations(BaseDatabaseOperations): compiler_module = "salesforce.backend.compiler" def __init__(self, connection): # not calling superclass constructor to maintain Django 1.3 support self.connection = connection self._cache = None def connection_init(self): pass def sql_flush(self, style, tables, sequences): return [] def quote_name(self, name): return name def value_to_db_datetime(self, value): """ We let the JSON serializer handle dates for us. """ return value def value_to_db_date(self, value): """ We let the JSON serializer handle dates for us. """ return value def last_insert_id(self, cursor, db_table, db_column): return cursor.lastrowid
Fix bug with Date fields and SOQL.
Fix bug with Date fields and SOQL. Fixes https://github.com/freelancersunion/django-salesforce/issues/10
Python
mit
django-salesforce/django-salesforce,chromakey/django-salesforce,philchristensen/django-salesforce,hynekcer/django-salesforce,chromakey/django-salesforce,hynekcer/django-salesforce,hynekcer/django-salesforce,chromakey/django-salesforce,django-salesforce/django-salesforce,philchristensen/django-salesforce,django-salesforce/django-salesforce,philchristensen/django-salesforce
--- +++ @@ -35,6 +35,12 @@ We let the JSON serializer handle dates for us. """ return value + + def value_to_db_date(self, value): + """ + We let the JSON serializer handle dates for us. + """ + return value def last_insert_id(self, cursor, db_table, db_column): return cursor.lastrowid
84338dba126a25a0c37056df8d7fd0c5a13f2a69
selftest.features/environment.py
selftest.features/environment.py
# -*- coding: UTF-8 -*- """ before_step(context, step), after_step(context, step) These run before and after every step. The step passed in is an instance of Step. before_scenario(context, scenario), after_scenario(context, scenario) These run before and after each scenario is run. The scenario passed in is an instance of Scenario. before_feature(context, feature), after_feature(context, feature) These run before and after each feature file is exercised. The feature passed in is an instance of Feature. before_tag(context, tag), after_tag(context, tag) """ import logging import os.path import shutil def before_all(context): if not context.config.log_capture: logging.basicConfig(level=logging.DEBUG) def after_all(context): print "SUMMARY:" #def before_feature(context, feature): # context.workdir = None # #def after_feature(context, feature): # # destroy_workdir(context.workdir) # context.workdir = None # #def destroy_workdir(workdir): # if workdir and os.path.exists(workdir): # shutil.rmtree(workdir, ignore_errors=True)
# -*- coding: UTF-8 -*- """ before_step(context, step), after_step(context, step) These run before and after every step. The step passed in is an instance of Step. before_scenario(context, scenario), after_scenario(context, scenario) These run before and after each scenario is run. The scenario passed in is an instance of Scenario. before_feature(context, feature), after_feature(context, feature) These run before and after each feature file is exercised. The feature passed in is an instance of Feature. before_tag(context, tag), after_tag(context, tag) """ import logging import os.path import shutil def before_all(context): if not context.config.log_capture: logging.basicConfig(level=logging.DEBUG) def after_all(context): # TEMPORARILY-DISABLED: print "SUMMARY:" pass #def before_feature(context, feature): # context.workdir = None # #def after_feature(context, feature): # # destroy_workdir(context.workdir) # context.workdir = None # #def destroy_workdir(workdir): # if workdir and os.path.exists(workdir): # shutil.rmtree(workdir, ignore_errors=True)
Disable after_all() output for now.
Disable after_all() output for now.
Python
bsd-2-clause
jenisys/behave,jenisys/behave
--- +++ @@ -25,7 +25,8 @@ logging.basicConfig(level=logging.DEBUG) def after_all(context): - print "SUMMARY:" + # TEMPORARILY-DISABLED: print "SUMMARY:" + pass #def before_feature(context, feature): # context.workdir = None
fb7e771646946637824b06eaf6d21b8c1b2be164
main.py
main.py
# -*- coding: utf-8 -*- ''' url-shortener ============== An application for generating and storing shorter aliases for requested urls. Uses `spam-lists`__ to prevent generating a short url for an address recognized as spam, or to warn a user a pre-existing short alias has a target that has been later recognized as spam. .. __: https://github.com/piotr-rusin/spam-lists ''' from url_shortener import app, event_handlers, views __title__ = 'url-shortener' __version__ = '0.9.0.dev1' __author__ = 'Piotr Rusin' __email__ = "piotr.rusin88@gmail.com" __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016 Piotr Rusin' if not app.debug: import logging from logging.handlers import TimedRotatingFileHandler file_handler = TimedRotatingFileHandler(app.config['LOG_FILE']) file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) app.run()
# -*- coding: utf-8 -*- ''' url-shortener ============== An application for generating and storing shorter aliases for requested urls. Uses `spam-lists`__ to prevent generating a short url for an address recognized as spam, or to warn a user a pre-existing short alias has a target that has been later recognized as spam. .. __: https://github.com/piotr-rusin/spam-lists ''' from url_shortener import app, event_handlers, views __title__ = 'url-shortener' __version__ = '0.9.0.dev1' __author__ = 'Piotr Rusin' __email__ = "piotr.rusin88@gmail.com" __license__ = 'Apache 2.0' __copyright__ = 'Copyright 2016 Piotr Rusin' log_file = app.config['LOG_FILE'] if not app.debug and log_file is not None: import logging from logging.handlers import TimedRotatingFileHandler file_handler = TimedRotatingFileHandler(app.config['LOG_FILE']) file_handler.setLevel(logging.WARNING) app.logger.addHandler(file_handler) app.run()
Make application use log file if its name is not None
Make application use log file if its name is not None
Python
mit
piotr-rusin/url-shortener,piotr-rusin/url-shortener
--- +++ @@ -20,7 +20,9 @@ __copyright__ = 'Copyright 2016 Piotr Rusin' -if not app.debug: +log_file = app.config['LOG_FILE'] + +if not app.debug and log_file is not None: import logging from logging.handlers import TimedRotatingFileHandler file_handler = TimedRotatingFileHandler(app.config['LOG_FILE'])
805e86c0cd69f49863d2ca4c37e094a344d79c64
lib/jasy/core/MetaData.py
lib/jasy/core/MetaData.py
# # Jasy - JavaScript Tooling Refined # Copyright 2010 Sebastian Werner # class MetaData: """ Data structure to hold all dependency information Hint: Must be a clean data class without links to other systems for optiomal cachability using Pickle """ def __init__(self, tree): self.provides = set() self.requires = set() self.optionals = set() self.breaks = set() self.assets = set() self.__inspect(tree) def __inspect(self, node): """ The internal inspection routine """ # Parse comments try: comments = node.comments except AttributeError: comments = None if comments: for comment in comments: commentTags = comment.getTags() if commentTags: if "provide" in commentTags: self.provides.update(set(commentTags["provide"])) if "require" in commentTags: self.requires.update(set(commentTags["require"])) if "optional" in commentTags: self.optionals.update(set(commentTags["optional"])) if "break" in commentTags: self.breaks.update(set(commentTags["break"])) if "asset" in commentTags: self.assets.update(set(commentTags["asset"])) # Process children for child in node: self.__inspect(child)
# # Jasy - JavaScript Tooling Refined # Copyright 2010 Sebastian Werner # class MetaData: """ Data structure to hold all dependency information Hint: Must be a clean data class without links to other systems for optiomal cachability using Pickle """ __slots__ = ["provides", "requires", "optionals", "breaks", "assets"] def __init__(self, tree): self.provides = set() self.requires = set() self.optionals = set() self.breaks = set() self.assets = set() self.__inspect(tree) def __inspect(self, node): """ The internal inspection routine """ # Parse comments try: comments = node.comments except AttributeError: comments = None if comments: for comment in comments: commentTags = comment.getTags() if commentTags: if "provide" in commentTags: self.provides.update(set(commentTags["provide"])) if "require" in commentTags: self.requires.update(set(commentTags["require"])) if "optional" in commentTags: self.optionals.update(set(commentTags["optional"])) if "break" in commentTags: self.breaks.update(set(commentTags["break"])) if "asset" in commentTags: self.assets.update(set(commentTags["asset"])) # Process children for child in node: self.__inspect(child)
Make use of slots to reduce in-memory size
Make use of slots to reduce in-memory size
Python
mit
zynga/jasy,zynga/jasy,sebastian-software/jasy,sebastian-software/jasy
--- +++ @@ -10,6 +10,8 @@ Hint: Must be a clean data class without links to other systems for optiomal cachability using Pickle """ + + __slots__ = ["provides", "requires", "optionals", "breaks", "assets"] def __init__(self, tree): self.provides = set()
c73de73aca304d347e9faffa77eab417cec0b4b5
app/util.py
app/util.py
# Various utility functions import os SHOULD_CACHE = os.environ['ENV'] == 'production' def cached_function(func): data = {} def wrapper(*args): if not SHOULD_CACHE: return func(*args) cache_key = ' '.join([str(x) for x in args]) if cache_key not in data: data[cache_key] = func(*args) return data[cache_key] return wrapper
# Various utility functions import os SHOULD_CACHE = os.environ['ENV'] == 'production' def cached_function(func): data = {} def wrapper(*args): if not SHOULD_CACHE: return func(*args) cache_key = ' '.join([str(x) for x in args]) if cache_key not in data: data[cache_key] = func(*args) return data[cache_key] wrapper.__name__ = func.__name__ return wrapper
Make cached_function not modify function name
Make cached_function not modify function name
Python
mit
albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com,albertyw/albertyw.com
--- +++ @@ -16,4 +16,5 @@ data[cache_key] = func(*args) return data[cache_key] + wrapper.__name__ = func.__name__ return wrapper
bc8b0ce313d1b09469b8bc2e15fa068ce0133057
numpy/fft/fftpack_lite_clr.py
numpy/fft/fftpack_lite_clr.py
import clr clr.AddReference("fft") from numpy__fft__fftpack_cython import *
import clr clr.AddReference("fftpack_lite") from numpy__fft__fftpack_cython import *
Fix an incorrect library name.
Fix an incorrect library name.
Python
bsd-3-clause
numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor
--- +++ @@ -1,4 +1,4 @@ import clr -clr.AddReference("fft") +clr.AddReference("fftpack_lite") from numpy__fft__fftpack_cython import *
267a7cb5c3947697df341cb25f962da1fa791805
cubex/calltree.py
cubex/calltree.py
class CallTree(object): def __init__(self, node): self.call_id = node.get('id') self.region_id = node.get('calleeId') self.children = [] self.metrics = {} #cube.cindex[int(node.get('id'))] = self #for child_node in node.findall('cnode'): # child_tree = CallTree(child_node, cube) # self.children.append(child_tree)
class CallTree(object): def __init__(self, node): self.call_id = int(node.get('id')) self.region_id = int(node.get('calleeId')) self.children = [] self.metrics = {} #cube.cindex[int(node.get('id'))] = self #for child_node in node.findall('cnode'): # child_tree = CallTree(child_node, cube) # self.children.append(child_tree)
Save call tree indices as integers
Save call tree indices as integers
Python
apache-2.0
marshallward/cubex
--- +++ @@ -2,8 +2,8 @@ def __init__(self, node): - self.call_id = node.get('id') - self.region_id = node.get('calleeId') + self.call_id = int(node.get('id')) + self.region_id = int(node.get('calleeId')) self.children = [] self.metrics = {}
174ed142f4726b0f725cf24b83d8c1e45ea395c8
gunter-tweet.py
gunter-tweet.py
#!/usr/bin/env python import os import random import tweepy import config last_seen_path = os.path.join(os.path.dirname(__file__), 'last-seen') def get_api(): auth = tweepy.OAuthHandler(config.consumer_key, config.consumer_secret) auth.set_access_token(config.key, config.secret) return tweepy.API(auth) def get_last_seen(): try: return int(open(last_seen_path).read()) except: pass def save_last_seen(mentions): open(last_seen_path, 'w').write(str(mentions[-1].id)) def generate_wenks(): return ' '.join(['Wenk.'] * random.randrange(1, 4)) def generate_reply(mention): return '@' + mention.user.screen_name + ' ' + generate_wenks() def should_wenk(): # 10% probability return random.randrange(-9, 1) == 0 api = get_api() mentions = api.mentions_timeline(since_id=get_last_seen()) if mentions: for mention in reversed(mentions): api.update_status(generate_reply(mention)) save_last_seen(mentions) elif should_wenk(): api.update_status(generate_wenks())
#!/usr/bin/env python import os import random import tweepy import config last_seen_path = os.path.join(os.path.dirname(__file__), 'last-seen') def get_api(): auth = tweepy.OAuthHandler(config.consumer_key, config.consumer_secret) auth.set_access_token(config.key, config.secret) return tweepy.API(auth) def get_last_seen(): try: return int(open(last_seen_path).read()) except: pass def save_last_seen(mentions): open(last_seen_path, 'w').write(str(mentions[0].id)) def generate_wenks(): return ' '.join(['Wenk.'] * random.randrange(1, 4)) def generate_reply(mention): return '@' + mention.user.screen_name + ' ' + generate_wenks() def should_wenk(): # 10% probability return random.randrange(-9, 1) == 0 api = get_api() mentions = api.mentions_timeline(since_id=get_last_seen()) if mentions: for mention in reversed(mentions): api.update_status(generate_reply(mention)) save_last_seen(mentions) elif should_wenk(): api.update_status(generate_wenks())
Use the first mention for saving
Use the first mention for saving
Python
agpl-3.0
gnoronha/gunter-tweet
--- +++ @@ -25,7 +25,7 @@ def save_last_seen(mentions): - open(last_seen_path, 'w').write(str(mentions[-1].id)) + open(last_seen_path, 'w').write(str(mentions[0].id)) def generate_wenks():
29f727f5391bb3fc40270b58a798f146cc202a3d
modules/pipeurlbuilder.py
modules/pipeurlbuilder.py
# pipeurlbuilder.py # import urllib from pipe2py import util def pipe_urlbuilder(context, _INPUT, conf, **kwargs): """This source builds a url and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: BASE -- base PATH -- path elements PARAM -- query parameters Yields (_OUTPUT): url """ for item in _INPUT: #note: we could cache get_value results if item==True url = util.get_value(conf['BASE'], item, **kwargs) if not url.endswith('/'): url += '/' if 'PATH' in conf: path = conf['PATH'] if not isinstance(path, list): path = [path] path = [util.get_value(p, item, **kwargs) for p in path if p] url += "/".join(p for p in path if p) url = url.rstrip("/") #Ensure url is valid url = util.url_quote(url) params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM'] if p]) if params and params.keys() != [u'']: url += "?" + urllib.urlencode(params) yield url
# pipeurlbuilder.py # import urllib from pipe2py import util def pipe_urlbuilder(context, _INPUT, conf, **kwargs): """This source builds a url and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: BASE -- base PATH -- path elements PARAM -- query parameters Yields (_OUTPUT): url """ for item in _INPUT: #note: we could cache get_value results if item==True url = util.get_value(conf['BASE'], item, **kwargs) if not url.endswith('/'): url += '/' if 'PATH' in conf: path = conf['PATH'] if not isinstance(path, list): path = [path] path = [util.get_value(p, item, **kwargs) for p in path if p] url += "/".join(p for p in path if p) url = url.rstrip("/") #Ensure url is valid url = util.url_quote(url) param_defs = conf['PARAM'] if not isinstance(param_defs, list): param_defs = [param_defs] params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in param_defs if p]) if params and params.keys() != [u'']: url += "?" + urllib.urlencode(params) yield url
Handle single param definition (following Yahoo! changes)
Handle single param definition (following Yahoo! changes)
Python
mit
nerevu/riko,nerevu/riko
--- +++ @@ -37,7 +37,11 @@ #Ensure url is valid url = util.url_quote(url) - params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM'] if p]) + param_defs = conf['PARAM'] + if not isinstance(param_defs, list): + param_defs = [param_defs] + + params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in param_defs if p]) if params and params.keys() != [u'']: url += "?" + urllib.urlencode(params)
d5229fcae9481ff6666eeb076825f4ddd3929b02
asyncio/__init__.py
asyncio/__init__.py
"""The asyncio package, tracking PEP 3156.""" import sys # The selectors module is in the stdlib in Python 3.4 but not in 3.3. # Do this first, so the other submodules can use "from . import selectors". # Prefer asyncio/selectors.py over the stdlib one, as ours may be newer. try: from . import selectors except ImportError: import selectors # Will also be exported. if sys.platform == 'win32': # Similar thing for _overlapped. try: from . import _overlapped except ImportError: import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. from .coroutines import * from .events import * from .futures import * from .locks import * from .protocols import * from .queues import * from .streams import * from .subprocess import * from .tasks import * from .transports import * if sys.platform == 'win32': # pragma: no cover from .windows_events import * else: from .unix_events import * # pragma: no cover __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + queues.__all__ + streams.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__)
"""The asyncio package, tracking PEP 3156.""" import sys # The selectors module is in the stdlib in Python 3.4 but not in 3.3. # Do this first, so the other submodules can use "from . import selectors". # Prefer asyncio/selectors.py over the stdlib one, as ours may be newer. try: from . import selectors except ImportError: import selectors # Will also be exported. if sys.platform == 'win32': # Similar thing for _overlapped. try: from . import _overlapped except ImportError: import _overlapped # Will also be exported. # This relies on each of the submodules having an __all__ variable. from .coroutines import * from .events import * from .futures import * from .locks import * from .protocols import * from .queues import * from .streams import * from .subprocess import * from .tasks import * from .transports import * __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + locks.__all__ + protocols.__all__ + queues.__all__ + streams.__all__ + subprocess.__all__ + tasks.__all__ + transports.__all__) if sys.platform == 'win32': # pragma: no cover from .windows_events import * __all__ += windows_events.__all__ else: from .unix_events import * # pragma: no cover __all__ += unix_events.__all__
Fix asyncio.__all__: export also unix_events and windows_events symbols
Fix asyncio.__all__: export also unix_events and windows_events symbols For example, on Windows, it was not possible to get ProactorEventLoop or DefaultEventLoopPolicy using "from asyncio import *".
Python
apache-2.0
overcastcloud/trollius,overcastcloud/trollius,overcastcloud/trollius
--- +++ @@ -29,12 +29,6 @@ from .tasks import * from .transports import * -if sys.platform == 'win32': # pragma: no cover - from .windows_events import * -else: - from .unix_events import * # pragma: no cover - - __all__ = (coroutines.__all__ + events.__all__ + futures.__all__ + @@ -45,3 +39,10 @@ subprocess.__all__ + tasks.__all__ + transports.__all__) + +if sys.platform == 'win32': # pragma: no cover + from .windows_events import * + __all__ += windows_events.__all__ +else: + from .unix_events import * # pragma: no cover + __all__ += unix_events.__all__
e642716c0815c989b994d436921b0fb1a4f3dfa1
djangae/checks.py
djangae/checks.py
import os from django.core import checks from google.appengine.tools.devappserver2.application_configuration import ModuleConfiguration from djangae.environment import get_application_root def check_deferred_builtin(app_configs=None, **kwargs): """ Check that the deferred builtin is switched off, as it'll override Djangae's deferred handler """ app_yaml_path = os.path.join(get_application_root(), "app.yaml") config = ModuleConfiguration(app_yaml_path) errors = [] for handler in config.handlers: if handler.url == '/_ah/queue/deferred': if handler.script == 'google.appengine.ext.deferred.application': errors.append( checks.Warning( "Deferred builtin is switched on. This overrides Djangae's deferred handler", hint='Remove deferred builtin from app.yaml', id='djangae.W001' ) ) break return errors
import os from django.core import checks from djangae.environment import get_application_root def check_deferred_builtin(app_configs=None, **kwargs): """ Check that the deferred builtin is switched off, as it'll override Djangae's deferred handler """ from google.appengine.tools.devappserver2.application_configuration import ModuleConfiguration app_yaml_path = os.path.join(get_application_root(), "app.yaml") config = ModuleConfiguration(app_yaml_path) errors = [] for handler in config.handlers: if handler.url == '/_ah/queue/deferred': if handler.script == 'google.appengine.ext.deferred.application': errors.append( checks.Warning( "Deferred builtin is switched on. This overrides Djangae's deferred handler", hint='Remove deferred builtin from app.yaml', id='djangae.W001' ) ) break return errors
Move import that depends on devserver
Move import that depends on devserver
Python
bsd-3-clause
potatolondon/djangae,grzes/djangae,potatolondon/djangae,grzes/djangae,grzes/djangae
--- +++ @@ -1,7 +1,6 @@ import os from django.core import checks -from google.appengine.tools.devappserver2.application_configuration import ModuleConfiguration from djangae.environment import get_application_root @@ -10,6 +9,8 @@ """ Check that the deferred builtin is switched off, as it'll override Djangae's deferred handler """ + from google.appengine.tools.devappserver2.application_configuration import ModuleConfiguration + app_yaml_path = os.path.join(get_application_root(), "app.yaml") config = ModuleConfiguration(app_yaml_path) errors = []
6eb8ad49e25039ad61470e30e42c8ab352ab9b1c
sep/sep_search_result.py
sep/sep_search_result.py
from lxml import html import re import requests from constants import SEP_URL class SEPSearchResult(): query = None results = None def __init__(self, query): self.set_query(query) def set_query(self, query): pattern = re.compile('[^a-zA-Z\d\s]') stripped_query = re.sub(pattern, '', query) self.query = str(stripped_query).lower().split() @property def url(self): url = SEP_URL + "search/searcher.py?query=" for word in self.query: url += word + "+" return url def request_results(self): page = requests.get(self.url) # Remvoe bold tags text_no_bold = re.sub('</? ?b>', '', page.text) text_no_newlines = re.sub('\n', '', text_no_bold) tree = html.fromstring(text_no_newlines) titles = tree.xpath("//div[@class='result_title']/a/text()") urls = tree.xpath("//div[@class='result_title']/a/@href") # Build the output tuples output = [] for i in range(len(titles)): output.append( { "title": titles[i], "url": SEP_URL + urls[i].lstrip("../") } ) self.results = output return output
from lxml import html import re import requests from constants import SEP_URL class SEPSearchResult(): query = None results = None def __init__(self, query): self.set_query(query) def set_query(self, query): pattern = re.compile('[^a-zA-Z\d\s]') stripped_query = re.sub(pattern, '', query) self.query = str(stripped_query).lower().split() @property def url(self): url = SEP_URL + "search/searcher.py?query=" for word in self.query: url += word + "+" print url return url def request_results(self): page = requests.get(self.url) # Remvoe bold tags text_no_bold = re.sub('</? ?b>', '', page.text) text_no_newlines = re.sub('\n', '', text_no_bold) tree = html.fromstring(text_no_newlines) titles = tree.xpath("//div[@class='result_title']/a/text()") urls = tree.xpath("//div[@class='result_title']/a/@href") # Build the output tuples output = [] for i in range(len(titles)): output.append( { "title": titles[i], "url": SEP_URL + urls[i].lstrip("../") } ) self.results = output return output
Print SEP urls for debug
New: Print SEP urls for debug
Python
mit
AFFogarty/SEP-Bot,AFFogarty/SEP-Bot
--- +++ @@ -22,6 +22,7 @@ url = SEP_URL + "search/searcher.py?query=" for word in self.query: url += word + "+" + print url return url def request_results(self):
7fdbe50d113a78fd02101056b56d44d917c5571c
joins/models.py
joins/models.py
from django.db import models # Create your models here. class Join(models.Model): email = models.EmailField() ip_address = models.CharField(max_length=120, default='ABC') timestamp = models.DateTimeField(auto_now_add = True, auto_now=False) updated = models.DateTimeField(auto_now_add = False, auto_now=True) def __unicode__(self): return "%s" %(self.email)
from django.db import models # Create your models here. class Join(models.Model): email = models.EmailField() ip_address = models.CharField(max_length=120, default='ABC') timestamp = models.DateTimeField(auto_now_add = True, auto_now=False) updated = models.DateTimeField(auto_now_add = False, auto_now=True) def __unicode__(self): return "%s" %(self.email) #To see the guide on using south, go here: #https://github.com/codingforentrepreneurs/Guides/blob/master/using_south_in_django.md
Add South Guide, made message for it
Add South Guide, made message for it
Python
mit
codingforentrepreneurs/launch-with-code,codingforentrepreneurs/launch-with-code,krishnazure/launch-with-code,krishnazure/launch-with-code,krishnazure/launch-with-code
--- +++ @@ -11,3 +11,7 @@ def __unicode__(self): return "%s" %(self.email) + +#To see the guide on using south, go here: +#https://github.com/codingforentrepreneurs/Guides/blob/master/using_south_in_django.md +
e43ea9602c272119f18e270a0ee138401ee0b02a
digit_guesser.py
digit_guesser.py
import matplotlib.pyplot as plt from sklearn import datasets from sklearn import svm digits = datasets.load_digits() clf = svm.SVC(gamma=0.0001, C=100) training_set = digits.data[:-10] labels = digits.target[:-10] x, y = training_set, labels clf.fit(x, y) for i in range(10): print("Prediction: {}".format(clf.predict([digits.data[-i]]))) print("Digit: [{}]".format(digits.target[-i])) # print('Prediction: ', clf.predict([digits.data[-1]])) # plt.imshow(digits.images[-1], cmap=plt.cm.gray_r, interpolation='nearest') # plt.show()
from sklearn import datasets from sklearn import svm digits = datasets.load_digits() clf = svm.SVC(gamma=0.0001, C=100) training_set = digits.data[:-10] training_labels = digits.target[:-10] testing_set = digits.data[-10:] testing_labels = digits.target[-10:] x, y = training_set, training_labels clf.fit(x, y) for i in range(10): print("Test set: {}. Predicted: {}".format(testing_labels[i], clf.predict([testing_set[i]])[0]))
Make variables self descriptive and create a testing set.
Make variables self descriptive and create a testing set.
Python
mit
jeancsil/machine-learning
--- +++ @@ -1,5 +1,3 @@ -import matplotlib.pyplot as plt - from sklearn import datasets from sklearn import svm @@ -8,15 +6,13 @@ clf = svm.SVC(gamma=0.0001, C=100) training_set = digits.data[:-10] -labels = digits.target[:-10] +training_labels = digits.target[:-10] -x, y = training_set, labels +testing_set = digits.data[-10:] +testing_labels = digits.target[-10:] + +x, y = training_set, training_labels clf.fit(x, y) for i in range(10): - print("Prediction: {}".format(clf.predict([digits.data[-i]]))) - print("Digit: [{}]".format(digits.target[-i])) - -# print('Prediction: ', clf.predict([digits.data[-1]])) -# plt.imshow(digits.images[-1], cmap=plt.cm.gray_r, interpolation='nearest') -# plt.show() + print("Test set: {}. Predicted: {}".format(testing_labels[i], clf.predict([testing_set[i]])[0]))
86b889049ef1ee1c896e4ab44185fc54ef87a2c0
IPython/consoleapp.py
IPython/consoleapp.py
""" Shim to maintain backwards compatibility with old IPython.consoleapp imports. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from warnings import warn warn("The `IPython.consoleapp` package has been deprecated. " "You should import from jupyter_client.consoleapp instead.", DeprecationWarning, stacklevel=2) from jupyter_client.consoleapp import *
""" Shim to maintain backwards compatibility with old IPython.consoleapp imports. """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. from warnings import warn warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0." "You should import from jupyter_client.consoleapp instead.", stacklevel=2) from jupyter_client.consoleapp import *
Remove Deprecation Warning, add since when things were deprecated.
Remove Deprecation Warning, add since when things were deprecated.
Python
bsd-3-clause
ipython/ipython,ipython/ipython
--- +++ @@ -6,7 +6,7 @@ from warnings import warn -warn("The `IPython.consoleapp` package has been deprecated. " - "You should import from jupyter_client.consoleapp instead.", DeprecationWarning, stacklevel=2) +warn("The `IPython.consoleapp` package has been deprecated since IPython 4.0." + "You should import from jupyter_client.consoleapp instead.", stacklevel=2) from jupyter_client.consoleapp import *
4600fd4cf06d1a2f58b92ec5f9ce1e502cf1da33
bawebauth/models.py
bawebauth/models.py
# -*- coding: utf-8 -*- from django.db import models from django.core.cache import cache from django.contrib.auth.models import User from bawebauth.decorators import cache_property from django.utils.translation import ugettext_lazy as _ class Device(models.Model): user = models.ForeignKey(User) name = models.CharField(_('name'), max_length=100) ident = models.CharField(_('ident'), max_length=40) crdate = models.DateTimeField(_('date created'), auto_now_add=True) tstamp = models.DateTimeField(_('date edited'), auto_now=True) def __unicode__(self): return self.name @cache_property def usages(self): return self.usage_set.order_by('-crdate') @cache_property def last_usage(self): return self.usage_set.latest('crdate') class Usage(models.Model): user = models.ForeignKey(User) device = models.ForeignKey(Device) send = models.IntegerField(_('bytes send')) received = models.IntegerField(_('bytes received')) crdate = models.DateTimeField(_('date created'), auto_now_add=True) def __unicode__(self): return u'%s %s+ %s-' % (self.crdate, self.send, self.received)
# -*- coding: utf-8 -*- from django.db import models from django.core.cache import cache from django.contrib.auth.models import User from bawebauth.decorators import cache_property from django.utils.translation import ugettext_lazy as _ class Device(models.Model): user = models.ForeignKey(User) name = models.CharField(_('name'), max_length=100) ident = models.CharField(_('ident'), max_length=40) crdate = models.DateTimeField(_('date created'), auto_now_add=True) tstamp = models.DateTimeField(_('date edited'), auto_now=True) def __unicode__(self): return self.name @cache_property def usages(self): return self.usage_set.order_by('crdate') @cache_property def last_usage(self): return self.usage_set.latest('crdate') class Usage(models.Model): user = models.ForeignKey(User) device = models.ForeignKey(Device) send = models.IntegerField(_('bytes send')) received = models.IntegerField(_('bytes received')) crdate = models.DateTimeField(_('date created'), auto_now_add=True) def __unicode__(self): return u'%s %s+ %s-' % (self.crdate, self.send, self.received)
Revert order of usage property
Revert order of usage property
Python
mit
mback2k/django-bawebauth,mback2k/django-bawebauth,mback2k/django-bawebauth,mback2k/django-bawebauth
--- +++ @@ -17,7 +17,7 @@ @cache_property def usages(self): - return self.usage_set.order_by('-crdate') + return self.usage_set.order_by('crdate') @cache_property def last_usage(self):
c974a2fe075accdf58148fceb3f722b144e0b8d8
diylang/types.py
diylang/types.py
# -*- coding: utf-8 -*- """ This module holds some types we'll have use for along the way. It's your job to implement the Closure and Environment types. The DiyLangError class you can have for free :) """ class DiyLangError(Exception): """General DIY Lang error class.""" pass class Closure: def __init__(self, env, params, body): raise NotImplementedError("DIY") def __repr__(self): return "<closure/%d>" % len(self.params) class Environment: def __init__(self, variables=None): self.bindings = variables if variables else {} def lookup(self, symbol): raise NotImplementedError("DIY") def extend(self, variables): raise NotImplementedError("DIY") def set(self, symbol, value): raise NotImplementedError("DIY") class String: """ Simple data object for representing DIY Lang strings. Ignore this until you start working on part 8. """ def __init__(self, val=""): self.val = val def __str__(self): return '"{}"'.format(self.val) def __eq__(self, other): return isinstance(other, String) and other.val == self.val
# -*- coding: utf-8 -*- """ This module holds some types we'll have use for along the way. It's your job to implement the Closure and Environment types. The DiyLangError class you can have for free :) """ class DiyLangError(Exception): """General DIY Lang error class.""" pass class Closure(object): def __init__(self, env, params, body): raise NotImplementedError("DIY") def __repr__(self): return "<closure/%d>" % len(self.params) class Environment(object): def __init__(self, variables=None): self.bindings = variables if variables else {} def lookup(self, symbol): raise NotImplementedError("DIY") def extend(self, variables): raise NotImplementedError("DIY") def set(self, symbol, value): raise NotImplementedError("DIY") class String(object): """ Simple data object for representing DIY Lang strings. Ignore this until you start working on part 8. """ def __init__(self, val=""): self.val = val def __str__(self): return '"{}"'.format(self.val) def __eq__(self, other): return isinstance(other, String) and other.val == self.val
Fix Old-style class, subclass object explicitly.
Fix Old-style class, subclass object explicitly.
Python
bsd-3-clause
kvalle/diy-lisp,kvalle/diy-lisp,kvalle/diy-lang,kvalle/diy-lang
--- +++ @@ -13,7 +13,7 @@ pass -class Closure: +class Closure(object): def __init__(self, env, params, body): raise NotImplementedError("DIY") @@ -22,7 +22,7 @@ return "<closure/%d>" % len(self.params) -class Environment: +class Environment(object): def __init__(self, variables=None): self.bindings = variables if variables else {} @@ -37,7 +37,8 @@ raise NotImplementedError("DIY") -class String: +class String(object): + """ Simple data object for representing DIY Lang strings.
87d4e604ef72fbe0513c725a7fdf0d421e633257
changes/api/project_index.py
changes/api/project_index.py
from __future__ import absolute_import, division, unicode_literals from sqlalchemy.orm import joinedload from changes.api.base import APIView from changes.constants import Status from changes.models import Project, Build class ProjectIndexAPIView(APIView): def get(self): queryset = Project.query.order_by(Project.name.asc()) project_list = list(queryset) context = { 'projects': [], } for project in project_list: data = self.serialize(project) data['lastBuild'] = Build.query.options( joinedload(Build.project), joinedload(Build.author), ).filter( Build.revision_sha != None, # NOQA Build.patch_id == None, Build.project == project, Build.status == Status.finished, ).order_by( Build.date_created.desc(), ).first() data['numActiveBuilds'] = Build.query.filter( Build.project == project, Build.status != Status.finished, ).count() context['projects'].append(data) return self.respond(context) def get_stream_channels(self): return ['builds:*']
from __future__ import absolute_import, division, unicode_literals from sqlalchemy.orm import joinedload from changes.api.base import APIView from changes.constants import Status from changes.models import Project, Build class ProjectIndexAPIView(APIView): def get(self): queryset = Project.query.order_by(Project.name.asc()) project_list = list(queryset) context = { 'projects': [], } for project in project_list: data = self.serialize(project) data['lastBuild'] = Build.query.options( joinedload(Build.project), joinedload(Build.author), ).filter( Build.revision_sha != None, # NOQA Build.patch_id == None, Build.project == project, Build.status == Status.finished, ).order_by( Build.date_created.desc(), ).first() context['projects'].append(data) return self.respond(context) def get_stream_channels(self): return ['builds:*']
Remove numActiveBuilds as its unused
Remove numActiveBuilds as its unused
Python
apache-2.0
dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,bowlofstew/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes
--- +++ @@ -31,11 +31,6 @@ Build.date_created.desc(), ).first() - data['numActiveBuilds'] = Build.query.filter( - Build.project == project, - Build.status != Status.finished, - ).count() - context['projects'].append(data) return self.respond(context)
194e6a34744963e2a7b17b846ee2913e6e01ae11
pyblish_starter/plugins/validate_rig_members.py
pyblish_starter/plugins/validate_rig_members.py
import pyblish.api class ValidateStarterRigFormat(pyblish.api.InstancePlugin): """A rig must have a certain hierarchy and members - Must reside within `rig_GRP` transform - controls_SEL - cache_SEL - resources_SEL (optional) """ label = "Rig Format" order = pyblish.api.ValidatorOrder hosts = ["maya"] families = ["starter.rig"] def process(self, instance): missing = list() for member in ("controls_SEL", "cache_SEL"): if member not in instance: missing.append(member) assert not missing, "\"%s\" is missing members: %s" % ( instance, ", ".join("\"" + member + "\"" for member in missing))
import pyblish.api class ValidateStarterRigFormat(pyblish.api.InstancePlugin): """A rig must have a certain hierarchy and members - Must reside within `rig_GRP` transform - out_SEL - controls_SEL - in_SEL (optional) - resources_SEL (optional) """ label = "Rig Format" order = pyblish.api.ValidatorOrder hosts = ["maya"] families = ["starter.rig"] def process(self, instance): missing = list() for member in ("controls_SEL", "out_SEL"): if member not in instance: missing.append(member) assert not missing, "\"%s\" is missing members: %s" % ( instance, ", ".join("\"" + member + "\"" for member in missing))
Update interface for rigs - in/out versus None/cache
Update interface for rigs - in/out versus None/cache
Python
mit
pyblish/pyblish-starter,pyblish/pyblish-mindbender,mindbender-studio/core,MoonShineVFX/core,getavalon/core,MoonShineVFX/core,mindbender-studio/core,getavalon/core
--- +++ @@ -5,8 +5,9 @@ """A rig must have a certain hierarchy and members - Must reside within `rig_GRP` transform + - out_SEL - controls_SEL - - cache_SEL + - in_SEL (optional) - resources_SEL (optional) """ @@ -20,7 +21,7 @@ missing = list() for member in ("controls_SEL", - "cache_SEL"): + "out_SEL"): if member not in instance: missing.append(member)
42c76c83e76439e5d8377bed2f159cfe988f05b1
src/icalendar/__init__.py
src/icalendar/__init__.py
from icalendar.cal import ( Calendar, Event, Todo, Journal, Timezone, TimezoneStandard, TimezoneDaylight, FreeBusy, Alarm, ComponentFactory, ) # Property Data Value Types from icalendar.prop import ( vBinary, vBoolean, vCalAddress, vDatetime, vDate, vDDDTypes, vDuration, vFloat, vInt, vPeriod, vWeekday, vFrequency, vRecur, vText, vTime, vUri, vGeo, vUTCOffset, TypesFactory, ) # useful tzinfo subclasses from icalendar.prop import ( FixedOffset, LocalTimezone, ) # Parameters and helper methods for splitting and joining string with escaped # chars. from icalendar.parser import ( Parameters, q_split, q_join, ) __all__ = [ Calendar, Event, Todo, Journal, FreeBusy, Alarm, ComponentFactory, Timezone, TimezoneStandard, TimezoneDaylight, vBinary, vBoolean, vCalAddress, vDatetime, vDate, vDDDTypes, vDuration, vFloat, vInt, vPeriod, vWeekday, vFrequency, vRecur, vText, vTime, vUri, vGeo, vUTCOffset, TypesFactory, FixedOffset, LocalTimezone, Parameters, q_split, q_join, ]
from icalendar.cal import ( Calendar, Event, Todo, Journal, Timezone, TimezoneStandard, TimezoneDaylight, FreeBusy, Alarm, ComponentFactory, ) # Property Data Value Types from icalendar.prop import ( vBinary, vBoolean, vCalAddress, vDatetime, vDate, vDDDTypes, vDuration, vFloat, vInt, vPeriod, vWeekday, vFrequency, vRecur, vText, vTime, vUri, vGeo, vUTCOffset, TypesFactory, ) # useful tzinfo subclasses from icalendar.prop import ( FixedOffset, LocalTimezone, ) # Parameters and helper methods for splitting and joining string with escaped # chars. from icalendar.parser import ( Parameters, q_split, q_join, )
Remove incorrect use of __all__
Remove incorrect use of __all__
Python
bsd-2-clause
untitaker/icalendar,nylas/icalendar,geier/icalendar
--- +++ @@ -44,16 +44,3 @@ q_split, q_join, ) - - -__all__ = [ - Calendar, Event, Todo, Journal, - FreeBusy, Alarm, ComponentFactory, - Timezone, TimezoneStandard, TimezoneDaylight, - vBinary, vBoolean, vCalAddress, vDatetime, vDate, - vDDDTypes, vDuration, vFloat, vInt, vPeriod, - vWeekday, vFrequency, vRecur, vText, vTime, vUri, - vGeo, vUTCOffset, TypesFactory, - FixedOffset, LocalTimezone, - Parameters, q_split, q_join, -]
59cd76a166a46756977440f46b858efa276c0aa0
fireplace/cards/utils.py
fireplace/cards/utils.py
import random import fireplace.cards from ..actions import * from ..enums import CardType, GameTag, Race, Rarity, Zone from ..targeting import * def hand(func): """ @hand helper decorator The decorated event listener will only listen while in the HAND Zone """ func.zone = Zone.HAND return func drawCard = lambda self, *args: self.controller.draw() def randomCollectible(**kwargs): return random.choice(fireplace.cards.filter(collectible=True, **kwargs))
import random import fireplace.cards from ..actions import * from ..enums import CardType, GameTag, Race, Rarity, Zone from ..targeting import * def hand(func): """ @hand helper decorator The decorated event listener will only listen while in the HAND Zone """ func.zone = Zone.HAND return func drawCard = lambda self, *args: self.controller.draw() def RandomCard(**kwargs): return random.choice(fireplace.cards.filter(**kwargs)) def randomCollectible(**kwargs): return RandomCard(collectible=True, **kwargs)
Implement a RandomCard helper for definitions
Implement a RandomCard helper for definitions
Python
agpl-3.0
jleclanche/fireplace,Meerkov/fireplace,amw2104/fireplace,NightKev/fireplace,oftc-ftw/fireplace,butozerca/fireplace,liujimj/fireplace,liujimj/fireplace,Meerkov/fireplace,smallnamespace/fireplace,Ragowit/fireplace,amw2104/fireplace,oftc-ftw/fireplace,beheh/fireplace,smallnamespace/fireplace,butozerca/fireplace,Ragowit/fireplace
--- +++ @@ -16,5 +16,9 @@ drawCard = lambda self, *args: self.controller.draw() +def RandomCard(**kwargs): + return random.choice(fireplace.cards.filter(**kwargs)) + + def randomCollectible(**kwargs): - return random.choice(fireplace.cards.filter(collectible=True, **kwargs)) + return RandomCard(collectible=True, **kwargs)
a0d10e419b504dc2e7f4ba45a5d10a2d9d47019c
knights/base.py
knights/base.py
import ast from . import parse class Template: def __init__(self, raw): self.raw = raw self.root = parse.parse(raw) code = ast.Expression( body=ast.ListComp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), args=[ ast.Name(id='context', ctx=ast.Load()), ], keywords=[], starargs=None, kwargs=None ), ], keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { 'nodelist': self.root.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
import ast from . import parse class Template: def __init__(self, raw): self.raw = raw self.nodelist = parse.parse(raw) code = ast.Expression( body=ast.GeneratorExp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), args=[ ast.Name(id='context', ctx=ast.Load()), ], keywords=[], starargs=None, kwargs=None ), ], keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { 'nodelist': self.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
Use a generator for rendering, and pass nodelist unwrapped
Use a generator for rendering, and pass nodelist unwrapped
Python
mit
funkybob/knights-templater,funkybob/knights-templater
--- +++ @@ -7,10 +7,10 @@ class Template: def __init__(self, raw): self.raw = raw - self.root = parse.parse(raw) + self.nodelist = parse.parse(raw) code = ast.Expression( - body=ast.ListComp( + body=ast.GeneratorExp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ @@ -20,11 +20,11 @@ attr='render', ctx=ast.Load() ), - args=[ - ast.Name(id='context', ctx=ast.Load()), - ], keywords=[], starargs=None, kwargs=None + args=[ ast.Name(id='context', ctx=ast.Load()), ], + keywords=[], starargs=None, kwargs=None ), - ], keywords=[], starargs=None, kwargs=None + ], + keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( @@ -41,7 +41,7 @@ def render(self, context): global_ctx = { - 'nodelist': self.root.nodelist, + 'nodelist': self.nodelist, 'context': dict(context), }
16811d4f379974fb94c98b56b398a4d555e3e4cd
jasy/item/Doc.py
jasy/item/Doc.py
# # Jasy - Web Tooling Framework # Copyright 2010-2012 Zynga Inc. # Copyright 2013-2014 Sebastian Werner # import os import jasy.js.api.Data as Data import jasy.core.Text as Text import jasy.item.Abstract as Abstract from jasy import UserError class DocItem(Abstract.AbstractItem): kind = "doc" def generateId(self, relpath, package): if package: fileId = "%s/" % package else: fileId = "" return (fileId + os.path.splitext(relPath)[0]).replace("/", ".") def getApi(self): field = "api[%s]" % self.id apidata = self.project.getCache().read(field, self.getModificationTime()) if not Text.supportsMarkdown: raise UserError("Missing Markdown feature to convert package docs into HTML.") if apidata is None: apidata = Data.ApiData(self.id) apidata.main["type"] = "Package" apidata.main["doc"] = Text.highlightCodeBlocks(Text.markdownToHtml(self.getText())) self.project.getCache().store(field, apidata, self.getModificationTime()) return apidata
# # Jasy - Web Tooling Framework # Copyright 2010-2012 Zynga Inc. # Copyright 2013-2014 Sebastian Werner # import os import jasy.js.api.Data as Data import jasy.core.Text as Text import jasy.item.Abstract as Abstract from jasy import UserError class DocItem(Abstract.AbstractItem): kind = "doc" def generateId(self, relpath, package): if package: fileId = "%s/" % package else: fileId = "" return (fileId + os.path.dirname(relpath)).replace("/", ".") def getApi(self): field = "api[%s]" % self.id apidata = self.project.getCache().read(field, self.getModificationTime()) if not Text.supportsMarkdown: raise UserError("Missing Markdown feature to convert package docs into HTML.") if apidata is None: apidata = Data.ApiData(self.id) apidata.main["type"] = "Package" apidata.main["doc"] = Text.highlightCodeBlocks(Text.markdownToHtml(self.getText())) self.project.getCache().store(field, apidata, self.getModificationTime()) return apidata
Fix ID generation for js package documentation
Fix ID generation for js package documentation
Python
mit
sebastian-software/jasy,sebastian-software/jasy
--- +++ @@ -22,7 +22,7 @@ else: fileId = "" - return (fileId + os.path.splitext(relPath)[0]).replace("/", ".") + return (fileId + os.path.dirname(relpath)).replace("/", ".") def getApi(self): field = "api[%s]" % self.id
6593645ace6efdc0e7b79dbdf5a5b5f76396c693
cli/cli.py
cli/cli.py
import argparse parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Anaylysis') parser.add_argument('-v', '--version', action='version', version='0.1.0') parser.parse_args()
import argparse parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Anaylysis') parser.add_argument('-v', '--version', action='version', version='0.1.0') subparsers = parser.add_subparsers(help='commands') # A list command list_parser = subparsers.add_parser('list', help='List commands') list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'],help='List anayltics commands based on choice') parser.parse_args()
Add commands for listing available analytics commadns
Add commands for listing available analytics commadns
Python
mit
McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research
--- +++ @@ -2,4 +2,11 @@ parser = argparse.ArgumentParser(prog='moocx', description='EdX MOOC Data Anaylysis') parser.add_argument('-v', '--version', action='version', version='0.1.0') + +subparsers = parser.add_subparsers(help='commands') + +# A list command +list_parser = subparsers.add_parser('list', help='List commands') +list_parser.add_argument('list_commands', action='store', choices=['all', 'basic'],help='List anayltics commands based on choice') + parser.parse_args()
808413e56eb14568eae98791581c0f5870f46cd2
example/config.py
example/config.py
# pyinfra # File: pyinfra/example/config.py # Desc: entirely optional config file for the CLI deploy # see: pyinfra/api/config.py for defaults from pyinfra import hook, local # These can be here or in deploy.py TIMEOUT = 5 FAIL_PERCENT = 81 # Add hooks to be triggered throughout the deploy - separate to the operations @hook.before_connect def ensure_branch(data, state): # Check something local is correct, etc branch = local.shell('git rev-parse --abbrev-ref HEAD') app_branch = data.app_branch if branch != app_branch: # Raise hook.Error for pyinfra to handle raise hook.Error('We\'re on the wrong branch (want {0}, got {1})!'.format( app_branch, branch, )) @hook.after_deploy def notify_people(data, state): print('After deploy hook!')
# pyinfra # File: pyinfra/example/config.py # Desc: entirely optional config file for the CLI deploy # see: pyinfra/api/config.py for defaults from pyinfra import hook # These can be here or in deploy.py TIMEOUT = 5 FAIL_PERCENT = 81 # Add hooks to be triggered throughout the deploy - separate to the operations @hook.before_connect def ensure_branch(data, state): # Run JS bundle build pre-deploy # local.shell('yarn run build') print('Before connect hook!') @hook.after_deploy def notify_people(data, state): print('After deploy hook!')
Make it possible to run examples on any branch!
Make it possible to run examples on any branch!
Python
mit
Fizzadar/pyinfra,Fizzadar/pyinfra
--- +++ @@ -3,7 +3,7 @@ # Desc: entirely optional config file for the CLI deploy # see: pyinfra/api/config.py for defaults -from pyinfra import hook, local +from pyinfra import hook # These can be here or in deploy.py @@ -14,15 +14,9 @@ # Add hooks to be triggered throughout the deploy - separate to the operations @hook.before_connect def ensure_branch(data, state): - # Check something local is correct, etc - branch = local.shell('git rev-parse --abbrev-ref HEAD') - app_branch = data.app_branch - - if branch != app_branch: - # Raise hook.Error for pyinfra to handle - raise hook.Error('We\'re on the wrong branch (want {0}, got {1})!'.format( - app_branch, branch, - )) + # Run JS bundle build pre-deploy + # local.shell('yarn run build') + print('Before connect hook!') @hook.after_deploy
e8bb04f0084e0c722c21fc9c5950cb1b5370dd22
Tools/scripts/byteyears.py
Tools/scripts/byteyears.py
#! /usr/local/python # byteyears file ... # # Print a number representing the product of age and size of each file, # in suitable units. import sys, posix, time from stat import * secs_per_year = 365.0 * 24.0 * 3600.0 now = time.time() status = 0 for file in sys.argv[1:]: try: st = posix.stat(file) except posix.error, msg: sys.stderr.write('can\'t stat ' + `file` + ': ' + `msg` + '\n') status = 1 st = () if st: mtime = st[ST_MTIME] size = st[ST_SIZE] age = now - mtime byteyears = float(size) * float(age) / secs_per_year print file + '\t\t' + `int(byteyears)` sys.exit(status)
#! /usr/local/python # Print the product of age and size of each file, in suitable units. # # Usage: byteyears [ -a | -m | -c ] file ... # # Options -[amc] select atime, mtime (default) or ctime as age. import sys, posix, time import string from stat import * # Use lstat() to stat files if it exists, else stat() try: statfunc = posix.lstat except NameError: statfunc = posix.stat # Parse options if sys.argv[1] = '-m': itime = ST_MTIME del sys.argv[1] elif sys.argv[1] = '-c': itime = ST_CTIME del sys.argv[1] elif sys.argv[1] = '-a': itime = ST_CTIME del sys.argv[1] else: itime = ST_MTIME secs_per_year = 365.0 * 24.0 * 3600.0 # Scale factor now = time.time() # Current time, for age computations status = 0 # Exit status, set to 1 on errors # Compute max file name length maxlen = 1 for file in sys.argv[1:]: if len(file) > maxlen: maxlen = len(file) # Process each argument in turn for file in sys.argv[1:]: try: st = statfunc(file) except posix.error, msg: sys.stderr.write('can\'t stat ' + `file` + ': ' + `msg` + '\n') status = 1 st = () if st: anytime = st[itime] size = st[ST_SIZE] age = now - anytime byteyears = float(size) * float(age) / secs_per_year print string.ljust(file, maxlen), print string.rjust(`int(byteyears)`, 8) sys.exit(status)
Add options -amc; do lstat if possible; columnize properly.
Add options -amc; do lstat if possible; columnize properly.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
--- +++ @@ -1,29 +1,57 @@ #! /usr/local/python -# byteyears file ... +# Print the product of age and size of each file, in suitable units. # -# Print a number representing the product of age and size of each file, -# in suitable units. +# Usage: byteyears [ -a | -m | -c ] file ... +# +# Options -[amc] select atime, mtime (default) or ctime as age. import sys, posix, time +import string from stat import * -secs_per_year = 365.0 * 24.0 * 3600.0 -now = time.time() -status = 0 +# Use lstat() to stat files if it exists, else stat() +try: + statfunc = posix.lstat +except NameError: + statfunc = posix.stat +# Parse options +if sys.argv[1] = '-m': + itime = ST_MTIME + del sys.argv[1] +elif sys.argv[1] = '-c': + itime = ST_CTIME + del sys.argv[1] +elif sys.argv[1] = '-a': + itime = ST_CTIME + del sys.argv[1] +else: + itime = ST_MTIME + +secs_per_year = 365.0 * 24.0 * 3600.0 # Scale factor +now = time.time() # Current time, for age computations +status = 0 # Exit status, set to 1 on errors + +# Compute max file name length +maxlen = 1 +for file in sys.argv[1:]: + if len(file) > maxlen: maxlen = len(file) + +# Process each argument in turn for file in sys.argv[1:]: try: - st = posix.stat(file) + st = statfunc(file) except posix.error, msg: sys.stderr.write('can\'t stat ' + `file` + ': ' + `msg` + '\n') status = 1 st = () if st: - mtime = st[ST_MTIME] + anytime = st[itime] size = st[ST_SIZE] - age = now - mtime + age = now - anytime byteyears = float(size) * float(age) / secs_per_year - print file + '\t\t' + `int(byteyears)` + print string.ljust(file, maxlen), + print string.rjust(`int(byteyears)`, 8) sys.exit(status)
298dc9be1d9e85e79cdbaa95ef9cab1986fe87a7
saleor/product/migrations/0026_auto_20170102_0927.py
saleor/product/migrations/0026_auto_20170102_0927.py
# -*- coding: utf-8 -*- # Generated by Django 1.10.3 on 2017-01-02 15:27 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('product', '0025_auto_20161219_0517'), ] operations = [ migrations.CreateModel( name='Collection', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128, verbose_name='name', unique=True,)), ('products', models.ManyToManyField(to='product.Product')), ], ), migrations.AlterField( model_name='stocklocation', name='name', field=models.CharField(max_length=100, verbose_name='location'), ), ]
# -*- coding: utf-8 -*- # Generated by Django 1.10.3 on 2017-01-02 15:27 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('product', '0025_auto_20161219_0517'), ] operations = [ migrations.CreateModel( name='Collection', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('name', models.CharField(max_length=128, verbose_name='name', unique=True,)), ('products', models.ManyToManyField(to='product.Product')), ], ), ]
Remove unrelated thing from migration
Remove unrelated thing from migration
Python
bsd-3-clause
UITools/saleor,mociepka/saleor,mociepka/saleor,mociepka/saleor,UITools/saleor,UITools/saleor,maferelo/saleor,UITools/saleor,maferelo/saleor,UITools/saleor,maferelo/saleor
--- +++ @@ -20,9 +20,4 @@ ('products', models.ManyToManyField(to='product.Product')), ], ), - migrations.AlterField( - model_name='stocklocation', - name='name', - field=models.CharField(max_length=100, verbose_name='location'), - ), ]
3d42553ae6acd452e122a1a89851e4693a89abde
build.py
build.py
import os from flask.ext.frozen import Freezer import webassets from content import app, assets bundle_files = [] for bundle in assets: assert isinstance(bundle, webassets.Bundle) print("Building bundle {}".format(bundle.output)) bundle.build(force=True, disable_cache=True) bundle_files.append(bundle.output) print("Freezing") app.config['FREEZER_DESTINATION'] = os.path.abspath("static") app.config['FREEZER_BASE_URL'] = "http://dabo.guru/" app.config['FREEZER_DESTINATION_IGNORE'] = bundle_files freezer = Freezer(app=app, with_static_files=False, with_no_argument_rules=True, log_url_for=True) freezer.freeze()
import os from flask.ext.frozen import Freezer import webassets from content import app, assets bundle_files = [] for bundle in assets: assert isinstance(bundle, webassets.Bundle) print("Building bundle {}".format(bundle.output)) bundle.build(force=True, disable_cache=True) bundle_files.append(bundle.output) # This is a copy of Freezer.no_argument_rules() modified to ignore certain paths def no_argument_rules_urls_with_ignore(): """URL generator for URL rules that take no arguments.""" ignored = app.config.get('FREEZER_IGNORE_ENDPOINTS', []) for rule in app.url_map.iter_rules(): if rule.endpoint not in ignored and not rule.arguments and 'GET' in rule.methods: yield rule.endpoint, {} app.config['FREEZER_DESTINATION'] = os.path.abspath("static") app.config['FREEZER_BASE_URL'] = "http://dabo.guru/" app.config['FREEZER_DESTINATION_IGNORE'] = bundle_files app.config['FREEZER_IGNORE_ENDPOINTS'] = ['oauth_respond', 'mc_api_name', 'mc_api_uuid', 'serve_markdown'] freezer = Freezer(app=app, with_static_files=False, with_no_argument_rules=False, log_url_for=True) freezer.register_generator(no_argument_rules_urls_with_ignore) print("Freezing") freezer.freeze()
Add workaround for frozen-flask generating static pages for dynamic api pages like /oauth/.
Add workaround for frozen-flask generating static pages for dynamic api pages like /oauth/.
Python
apache-2.0
daboross/dabo.guru,daboross/dabo.guru,daboross/dabo.guru,daboross/dabo.guru
--- +++ @@ -12,11 +12,22 @@ bundle.build(force=True, disable_cache=True) bundle_files.append(bundle.output) -print("Freezing") + +# This is a copy of Freezer.no_argument_rules() modified to ignore certain paths +def no_argument_rules_urls_with_ignore(): + """URL generator for URL rules that take no arguments.""" + ignored = app.config.get('FREEZER_IGNORE_ENDPOINTS', []) + for rule in app.url_map.iter_rules(): + if rule.endpoint not in ignored and not rule.arguments and 'GET' in rule.methods: + yield rule.endpoint, {} + app.config['FREEZER_DESTINATION'] = os.path.abspath("static") app.config['FREEZER_BASE_URL'] = "http://dabo.guru/" app.config['FREEZER_DESTINATION_IGNORE'] = bundle_files -freezer = Freezer(app=app, with_static_files=False, with_no_argument_rules=True, log_url_for=True) +app.config['FREEZER_IGNORE_ENDPOINTS'] = ['oauth_respond', 'mc_api_name', 'mc_api_uuid', 'serve_markdown'] +freezer = Freezer(app=app, with_static_files=False, with_no_argument_rules=False, log_url_for=True) +freezer.register_generator(no_argument_rules_urls_with_ignore) +print("Freezing") freezer.freeze()
c1a4e9c83aa20ad333c4d6a1c9e53a732540ea39
jump_to_file.py
jump_to_file.py
import sublime
import sublime_plugin
import os

class JumpToFile(sublime_plugin.TextCommand):
    def run(self, edit = None):
        view = self.view
        for region in view.sel():
            if view.score_selector(region.begin(), "parameter.url, string.quoted"):
                # The scope includes the quote characters, so we slice them off
                try_file = view.substr(view.extract_scope(region.begin()))[1:-1]
                view_file = view.file_name()
                if view_file:
                    view_dir = os.path.dirname(view_file)
                    try_file = os.path.join(view_dir, try_file)
                    if not os.path.isfile(try_file):
                        try_file += '.rb'
                    if os.path.isfile(try_file):
                        view.window().open_file(try_file)
                    else:
                        sublime.status_message("Unable to find a file in the current selection")
import sublime
import sublime_plugin
import os

class JumpToFile(sublime_plugin.TextCommand):
    def _try_open(self, try_file, path=None):
        if path:
            try_file = os.path.join(path, try_file)

        if not os.path.isfile(try_file):
            try_file += '.rb'

        if os.path.isfile(try_file):
            self.view.window().open_file(try_file)
            return True
        else:
            msg = "Not a file: %s" % try_file
            print(msg)
            sublime.status_message(msg)
            return False


    def run(self, edit = None):
        view = self.view
        for region in view.sel():
            if view.score_selector(region.begin(), "parameter.url, string.quoted"):
                # The scope includes the quote characters, so we slice them off
                try_file = view.substr(view.extract_scope(region.begin()))[1:-1]

                if os.path.isabs(try_file):
                    self._try_open(try_file)
                    continue

                folders = view.window().folders()
                if folders:
                    for folder in folders:
                        if self._try_open(try_file, folder):
                            continue

                view_file = view.file_name()
                if view_file:
                    view_dir = os.path.dirname(view_file)
                    self._try_open(try_file, view_dir)
Add support for paths relative to project folders
Add support for paths relative to project folders
Python
mit
russelldavis/sublimerc
--- +++ @@ -3,19 +3,41 @@ import os class JumpToFile(sublime_plugin.TextCommand): + def _try_open(self, try_file, path=None): + if path: + try_file = os.path.join(path, try_file) + + if not os.path.isfile(try_file): + try_file += '.rb' + + if os.path.isfile(try_file): + self.view.window().open_file(try_file) + return True + else: + msg = "Not a file: %s" % try_file + print(msg) + sublime.status_message(msg) + return False + + def run(self, edit = None): view = self.view for region in view.sel(): if view.score_selector(region.begin(), "parameter.url, string.quoted"): # The scope includes the quote characters, so we slice them off try_file = view.substr(view.extract_scope(region.begin()))[1:-1] + + if os.path.isabs(try_file): + self._try_open(try_file) + continue + + folders = view.window().folders() + if folders: + for folder in folders: + if self._try_open(try_file, folder): + continue + view_file = view.file_name() if view_file: view_dir = os.path.dirname(view_file) - try_file = os.path.join(view_dir, try_file) - if not os.path.isfile(try_file): - try_file += '.rb' - if os.path.isfile(try_file): - view.window().open_file(try_file) - else: - sublime.status_message("Unable to find a file in the current selection") + self._try_open(try_file, view_dir)
a1b47d442290ea9ce19e25cd03c1aa5e39ad2ec5
scikits/learn/tests/test_pca.py
scikits/learn/tests/test_pca.py
from nose.tools import assert_equals

from .. import datasets
from ..pca import PCA

iris = datasets.load_iris()
X = iris.data


def test_pca():
    """ PCA """

    pca = PCA(k=2)
    X_r = pca.fit(X).transform(X)
    assert_equals(X_r.shape[1], 2)

    pca = PCA()
    pca.fit(X)
    assert_equals(pca.explained_variance_.sum(), 1.0)
import numpy as np

from .. import datasets
from ..pca import PCA

iris = datasets.load_iris()
X = iris.data


def test_pca():
    """ PCA """

    pca = PCA(k=2)
    X_r = pca.fit(X).transform(X)
    np.testing.assert_equal(X_r.shape[1], 2)

    pca = PCA()
    pca.fit(X)
    np.testing.assert_almost_equal(pca.explained_variance_.sum(), 1.0, 3)
Fix tests to be more robust
BUG: Fix tests to be more robust
Python
bsd-3-clause
nvoron23/scikit-learn,B3AU/waveTree,sumspr/scikit-learn,frank-tancf/scikit-learn,madjelan/scikit-learn,mattilyra/scikit-learn,xzh86/scikit-learn,mwv/scikit-learn,yunfeilu/scikit-learn,JsNoNo/scikit-learn,scikit-learn/scikit-learn,Fireblend/scikit-learn,btabibian/scikit-learn,davidgbe/scikit-learn,arabenjamin/scikit-learn,kaichogami/scikit-learn,r-mart/scikit-learn,Titan-C/scikit-learn,JPFrancoia/scikit-learn,sanketloke/scikit-learn,lbishal/scikit-learn,ogrisel/scikit-learn,cdegroc/scikit-learn,andaag/scikit-learn,AlexanderFabisch/scikit-learn,aetilley/scikit-learn,sgenoud/scikit-learn,costypetrisor/scikit-learn,themrmax/scikit-learn,russel1237/scikit-learn,dsullivan7/scikit-learn,ZENGXH/scikit-learn,jereze/scikit-learn,zorojean/scikit-learn,Myasuka/scikit-learn,florian-f/sklearn,NunoEdgarGub1/scikit-learn,abimannans/scikit-learn,pianomania/scikit-learn,ankurankan/scikit-learn,kaichogami/scikit-learn,rahuldhote/scikit-learn,davidgbe/scikit-learn,giorgiop/scikit-learn,Obus/scikit-learn,kagayakidan/scikit-learn,PatrickOReilly/scikit-learn,jmschrei/scikit-learn,PatrickChrist/scikit-learn,ssaeger/scikit-learn,spallavolu/scikit-learn,liberatorqjw/scikit-learn,aflaxman/scikit-learn,jjx02230808/project0223,robbymeals/scikit-learn,wazeerzulfikar/scikit-learn,vivekmishra1991/scikit-learn,B3AU/waveTree,bigdataelephants/scikit-learn,sonnyhu/scikit-learn,ilo10/scikit-learn,cwu2011/scikit-learn,maheshakya/scikit-learn,mlyundin/scikit-learn,depet/scikit-learn,ChanderG/scikit-learn,imaculate/scikit-learn,lenovor/scikit-learn,pnedunuri/scikit-learn,ominux/scikit-learn,roxyboy/scikit-learn,Akshay0724/scikit-learn,scikit-learn/scikit-learn,alexsavio/scikit-learn,eickenberg/scikit-learn,ashhher3/scikit-learn,hsuantien/scikit-learn,chrsrds/scikit-learn,Garrett-R/scikit-learn,loli/sklearn-ensembletrees,voxlol/scikit-learn,tawsifkhan/scikit-learn,ChanChiChoi/scikit-learn,qifeigit/scikit-learn,DonBeo/scikit-learn,Nyker510/scikit-learn,tawsifkhan/scikit-learn,iismd17/scikit-learn,ilo10/scikit-learn,UNR-AERIAL/scikit-learn,pv/scikit-learn,pianomania/scikit-learn,marcocaccin/scikit-learn,q1ang/scikit-learn,jkarnows/scikit-learn,procoder317/scikit-learn,dsullivan7/scikit-learn,tosolveit/scikit-learn,abimannans/scikit-learn,PrashntS/scikit-learn,ahoyosid/scikit-learn,pv/scikit-learn,ndingwall/scikit-learn,manashmndl/scikit-learn,mlyundin/scikit-learn,joernhees/scikit-learn,glennq/scikit-learn,hainm/scikit-learn,stylianos-kampakis/scikit-learn,yonglehou/scikit-learn,aminert/scikit-learn,vshtanko/scikit-learn,aetilley/scikit-learn,Garrett-R/scikit-learn,Adai0808/scikit-learn,manhhomienbienthuy/scikit-learn,kylerbrown/scikit-learn,vinayak-mehta/scikit-learn,fabioticconi/scikit-learn,glemaitre/scikit-learn,madjelan/scikit-learn,Lawrence-Liu/scikit-learn,evgchz/scikit-learn,jakobworldpeace/scikit-learn,IshankGulati/scikit-learn,jorik041/scikit-learn,hugobowne/scikit-learn,mlyundin/scikit-learn,elkingtonmcb/scikit-learn,vinayak-mehta/scikit-learn,anurag313/scikit-learn,dingocuster/scikit-learn,ky822/scikit-learn,ZENGXH/scikit-learn,devanshdalal/scikit-learn,xubenben/scikit-learn,ZENGXH/scikit-learn,TomDLT/scikit-learn,arabenjamin/scikit-learn,victorbergelin/scikit-learn,hlin117/scikit-learn,elkingtonmcb/scikit-learn,devanshdalal/scikit-learn,shusenl/scikit-learn,luo66/scikit-learn,MartinDelzant/scikit-learn,wlamond/scikit-learn,huzq/scikit-learn,victorbergelin/scikit-learn,Lawrence-Liu/scikit-learn,scikit-learn/scikit-learn,pompiduskus/scikit-learn,alvarofierroclavero/scikit-learn,q1ang/scikit-learn,massmutual/scikit-learn,l
enovor/scikit-learn,depet/scikit-learn,ivannz/scikit-learn,larsmans/scikit-learn,MartinSavc/scikit-learn,ogrisel/scikit-learn,LohithBlaze/scikit-learn,AlexandreAbraham/scikit-learn,JPFrancoia/scikit-learn,UNR-AERIAL/scikit-learn,YinongLong/scikit-learn,qifeigit/scikit-learn,0x0all/scikit-learn,Vimos/scikit-learn,spallavolu/scikit-learn,Fireblend/scikit-learn,ZENGXH/scikit-learn,3manuek/scikit-learn,MechCoder/scikit-learn,nvoron23/scikit-learn,massmutual/scikit-learn,IshankGulati/scikit-learn,petosegan/scikit-learn,tdhopper/scikit-learn,evgchz/scikit-learn,xwolf12/scikit-learn,mattgiguere/scikit-learn,Obus/scikit-learn,glennq/scikit-learn,vybstat/scikit-learn,evgchz/scikit-learn,JsNoNo/scikit-learn,kmike/scikit-learn,jmschrei/scikit-learn,abhishekgahlot/scikit-learn,mhdella/scikit-learn,DSLituiev/scikit-learn,thilbern/scikit-learn,untom/scikit-learn,CVML/scikit-learn,wanggang3333/scikit-learn,arabenjamin/scikit-learn,nomadcube/scikit-learn,xubenben/scikit-learn,mjudsp/Tsallis,nikitasingh981/scikit-learn,arahuja/scikit-learn,samuel1208/scikit-learn,glouppe/scikit-learn,hdmetor/scikit-learn,walterreade/scikit-learn,ClimbsRocks/scikit-learn,maheshakya/scikit-learn,mikebenfield/scikit-learn,macks22/scikit-learn,henrykironde/scikit-learn,f3r/scikit-learn,pratapvardhan/scikit-learn,khkaminska/scikit-learn,cybernet14/scikit-learn,hsuantien/scikit-learn,vigilv/scikit-learn,fbagirov/scikit-learn,abhishekgahlot/scikit-learn,jaidevd/scikit-learn,3manuek/scikit-learn,sarahgrogan/scikit-learn,ltiao/scikit-learn,florian-f/sklearn,idlead/scikit-learn,rohanp/scikit-learn,altairpearl/scikit-learn,rrohan/scikit-learn,btabibian/scikit-learn,lucidfrontier45/scikit-learn,Nyker510/scikit-learn,shyamalschandra/scikit-learn,giorgiop/scikit-learn,OshynSong/scikit-learn,xuewei4d/scikit-learn,glemaitre/scikit-learn,samuel1208/scikit-learn,terkkila/scikit-learn,YinongLong/scikit-learn,icdishb/scikit-learn,mhdella/scikit-learn,imaculate/scikit-learn,kjung/scikit-learn,NunoEdgarGub1/scikit-learn,clemkoa/scikit-learn,davidgbe/scikit-learn,yunfeilu/scikit-learn,lbishal/scikit-learn,petosegan/scikit-learn,rahul-c1/scikit-learn,ZenDevelopmentSystems/scikit-learn,xavierwu/scikit-learn,zorroblue/scikit-learn,mikebenfield/scikit-learn,vortex-ape/scikit-learn,vinayak-mehta/scikit-learn,shusenl/scikit-learn,saiwing-yeung/scikit-learn,bigdataelephants/scikit-learn,voxlol/scikit-learn,CVML/scikit-learn,adamgreenhall/scikit-learn,clemkoa/scikit-learn,etkirsch/scikit-learn,DSLituiev/scikit-learn,rajat1994/scikit-learn,jzt5132/scikit-learn,ningchi/scikit-learn,xubenben/scikit-learn,anntzer/scikit-learn,Sentient07/scikit-learn,sanketloke/scikit-learn,zhenv5/scikit-learn,0asa/scikit-learn,imaculate/scikit-learn,yask123/scikit-learn,justincassidy/scikit-learn,heli522/scikit-learn,petosegan/scikit-learn,cainiaocome/scikit-learn,jakirkham/scikit-learn,lazywei/scikit-learn,pnedunuri/scikit-learn,kashif/scikit-learn,tawsifkhan/scikit-learn,frank-tancf/scikit-learn,cl4rke/scikit-learn,abhishekkrthakur/scikit-learn,Titan-C/scikit-learn,florian-f/sklearn,pratapvardhan/scikit-learn,zorojean/scikit-learn,henrykironde/scikit-learn,hrjn/scikit-learn,poryfly/scikit-learn,appapantula/scikit-learn,aewhatley/scikit-learn,manashmndl/scikit-learn,marcocaccin/scikit-learn,mjgrav2001/scikit-learn,ilyes14/scikit-learn,MechCoder/scikit-learn,Djabbz/scikit-learn,robin-lai/scikit-learn,MechCoder/scikit-learn,trankmichael/scikit-learn,appapantula/scikit-learn,JeanKossaifi/scikit-learn,mfjb/scikit-learn,Windy-Ground/scikit-learn,BiaDarkia/scikit-learn,ClimbsRock
s/scikit-learn,0x0all/scikit-learn,MartinSavc/scikit-learn,eickenberg/scikit-learn,murali-munna/scikit-learn,murali-munna/scikit-learn,gclenaghan/scikit-learn,IndraVikas/scikit-learn,shenzebang/scikit-learn,hugobowne/scikit-learn,pythonvietnam/scikit-learn,fyffyt/scikit-learn,kevin-intel/scikit-learn,yunfeilu/scikit-learn,Srisai85/scikit-learn,evgchz/scikit-learn,Adai0808/scikit-learn,nesterione/scikit-learn,elkingtonmcb/scikit-learn,zhenv5/scikit-learn,lesteve/scikit-learn,ngoix/OCRF,fengzhyuan/scikit-learn,kagayakidan/scikit-learn,BiaDarkia/scikit-learn,ogrisel/scikit-learn,dhruv13J/scikit-learn,vigilv/scikit-learn,jpautom/scikit-learn,adamgreenhall/scikit-learn,alexeyum/scikit-learn,ElDeveloper/scikit-learn,espg/scikit-learn,russel1237/scikit-learn,ldirer/scikit-learn,PrashntS/scikit-learn,IshankGulati/scikit-learn,xavierwu/scikit-learn,LohithBlaze/scikit-learn,tdhopper/scikit-learn,trankmichael/scikit-learn,icdishb/scikit-learn,PatrickChrist/scikit-learn,LiaoPan/scikit-learn,pypot/scikit-learn,deepesch/scikit-learn,walterreade/scikit-learn,vibhorag/scikit-learn,mojoboss/scikit-learn,ephes/scikit-learn,jjx02230808/project0223,wlamond/scikit-learn,yonglehou/scikit-learn,shangwuhencc/scikit-learn,icdishb/scikit-learn,dsquareindia/scikit-learn,jm-begon/scikit-learn,pianomania/scikit-learn,ankurankan/scikit-learn,PatrickOReilly/scikit-learn,nrhine1/scikit-learn,adamgreenhall/scikit-learn,plissonf/scikit-learn,imaculate/scikit-learn,CforED/Machine-Learning,liberatorqjw/scikit-learn,0asa/scikit-learn,ishanic/scikit-learn,potash/scikit-learn,nmayorov/scikit-learn,treycausey/scikit-learn,idlead/scikit-learn,Aasmi/scikit-learn,aabadie/scikit-learn,joernhees/scikit-learn,wazeerzulfikar/scikit-learn,ashhher3/scikit-learn,saiwing-yeung/scikit-learn,rahuldhote/scikit-learn,chrsrds/scikit-learn,nomadcube/scikit-learn,loli/semisupervisedforests,toastedcornflakes/scikit-learn,herilalaina/scikit-learn,nvoron23/scikit-learn,olologin/scikit-learn,Fireblend/scikit-learn,zihua/scikit-learn,0x0all/scikit-learn,costypetrisor/scikit-learn,jpautom/scikit-learn,hitszxp/scikit-learn,eg-zhang/scikit-learn,Achuth17/scikit-learn,fyffyt/scikit-learn,beepee14/scikit-learn,kaichogami/scikit-learn,vivekmishra1991/scikit-learn,cybernet14/scikit-learn,themrmax/scikit-learn,kjung/scikit-learn,depet/scikit-learn,jereze/scikit-learn,DSLituiev/scikit-learn,LiaoPan/scikit-learn,eg-zhang/scikit-learn,phdowling/scikit-learn,robin-lai/scikit-learn,justincassidy/scikit-learn,PatrickOReilly/scikit-learn,hsiaoyi0504/scikit-learn,themrmax/scikit-learn,sarahgrogan/scikit-learn,fabioticconi/scikit-learn,liyu1990/sklearn,ChanChiChoi/scikit-learn,mwv/scikit-learn,nhejazi/scikit-learn,henrykironde/scikit-learn,vibhorag/scikit-learn,mikebenfield/scikit-learn,JosmanPS/scikit-learn,altairpearl/scikit-learn,xzh86/scikit-learn,billy-inn/scikit-learn,aewhatley/scikit-learn,mfjb/scikit-learn,glennq/scikit-learn,mattilyra/scikit-learn,cwu2011/scikit-learn,fredhusser/scikit-learn,arjoly/scikit-learn,jkarnows/scikit-learn,tosolveit/scikit-learn,shikhardb/scikit-learn,fabianp/scikit-learn,ilyes14/scikit-learn,equialgo/scikit-learn,robbymeals/scikit-learn,MechCoder/scikit-learn,bikong2/scikit-learn,IndraVikas/scikit-learn,glennq/scikit-learn,ltiao/scikit-learn,samzhang111/scikit-learn,alvarofierroclavero/scikit-learn,gclenaghan/scikit-learn,manashmndl/scikit-learn,harshaneelhg/scikit-learn,poryfly/scikit-learn,Aasmi/scikit-learn,loli/semisupervisedforests,LohithBlaze/scikit-learn,bigdataelephants/scikit-learn,eickenberg/scikit-learn,olologin/scikit-le
arn,quheng/scikit-learn,wzbozon/scikit-learn,thientu/scikit-learn,shenzebang/scikit-learn,ivannz/scikit-learn,tmhm/scikit-learn,espg/scikit-learn,bthirion/scikit-learn,larsmans/scikit-learn,madjelan/scikit-learn,wanggang3333/scikit-learn,wazeerzulfikar/scikit-learn,loli/sklearn-ensembletrees,kylerbrown/scikit-learn,ahoyosid/scikit-learn,bnaul/scikit-learn,jayflo/scikit-learn,mjudsp/Tsallis,CforED/Machine-Learning,eg-zhang/scikit-learn,wzbozon/scikit-learn,mojoboss/scikit-learn,jblackburne/scikit-learn,yonglehou/scikit-learn,hitszxp/scikit-learn,marcocaccin/scikit-learn,lin-credible/scikit-learn,rvraghav93/scikit-learn,wzbozon/scikit-learn,shyamalschandra/scikit-learn,michigraber/scikit-learn,marcocaccin/scikit-learn,Adai0808/scikit-learn,sumspr/scikit-learn,h2educ/scikit-learn,arabenjamin/scikit-learn,nmayorov/scikit-learn,AIML/scikit-learn,loli/sklearn-ensembletrees,ycaihua/scikit-learn,manhhomienbienthuy/scikit-learn,rishikksh20/scikit-learn,rahul-c1/scikit-learn,RachitKansal/scikit-learn,lenovor/scikit-learn,rexshihaoren/scikit-learn,nrhine1/scikit-learn,0x0all/scikit-learn,potash/scikit-learn,samzhang111/scikit-learn,zorroblue/scikit-learn,rohanp/scikit-learn,jzt5132/scikit-learn,jaidevd/scikit-learn,sarahgrogan/scikit-learn,macks22/scikit-learn,Windy-Ground/scikit-learn,dhruv13J/scikit-learn,Sentient07/scikit-learn,amueller/scikit-learn,waterponey/scikit-learn,zuku1985/scikit-learn,AIML/scikit-learn,mblondel/scikit-learn,mlyundin/scikit-learn,betatim/scikit-learn,henridwyer/scikit-learn,hsiaoyi0504/scikit-learn,hdmetor/scikit-learn,NelisVerhoef/scikit-learn,kjung/scikit-learn,chrsrds/scikit-learn,AnasGhrab/scikit-learn,bthirion/scikit-learn,hainm/scikit-learn,sinhrks/scikit-learn,akionakamura/scikit-learn,sinhrks/scikit-learn,lucidfrontier45/scikit-learn,rexshihaoren/scikit-learn,RPGOne/scikit-learn,shangwuhencc/scikit-learn,liangz0707/scikit-learn,olologin/scikit-learn,henridwyer/scikit-learn,Lawrence-Liu/scikit-learn,anntzer/scikit-learn,moutai/scikit-learn,sgenoud/scikit-learn,mhue/scikit-learn,Jimmy-Morzaria/scikit-learn,ElDeveloper/scikit-learn,fengzhyuan/scikit-learn,zorroblue/scikit-learn,gotomypc/scikit-learn,ycaihua/scikit-learn,tosolveit/scikit-learn,sumspr/scikit-learn,ldirer/scikit-learn,rajat1994/scikit-learn,RayMick/scikit-learn,andaag/scikit-learn,themrmax/scikit-learn,btabibian/scikit-learn,hsiaoyi0504/scikit-learn,mwv/scikit-learn,vshtanko/scikit-learn,OshynSong/scikit-learn,alvarofierroclavero/scikit-learn,rexshihaoren/scikit-learn,0asa/scikit-learn,smartscheduling/scikit-learn-categorical-tree,abhishekgahlot/scikit-learn,henridwyer/scikit-learn,aabadie/scikit-learn,MohammedWasim/scikit-learn,samzhang111/scikit-learn,idlead/scikit-learn,simon-pepin/scikit-learn,rishikksh20/scikit-learn,vibhorag/scikit-learn,murali-munna/scikit-learn,liangz0707/scikit-learn,RPGOne/scikit-learn,AlexRobson/scikit-learn,nrhine1/scikit-learn,larsmans/scikit-learn,vermouthmjl/scikit-learn,xavierwu/scikit-learn,abhishekkrthakur/scikit-learn,roxyboy/scikit-learn,Vimos/scikit-learn,rexshihaoren/scikit-learn,ivannz/scikit-learn,aewhatley/scikit-learn,ycaihua/scikit-learn,nikitasingh981/scikit-learn,loli/semisupervisedforests,meduz/scikit-learn,tdhopper/scikit-learn,dsullivan7/scikit-learn,hitszxp/scikit-learn,HolgerPeters/scikit-learn,fyffyt/scikit-learn,pratapvardhan/scikit-learn,rsivapr/scikit-learn,rsivapr/scikit-learn,jseabold/scikit-learn,Barmaley-exe/scikit-learn,smartscheduling/scikit-learn-categorical-tree,sanketloke/scikit-learn,Windy-Ground/scikit-learn,abhishekgahlot/scikit-learn,Bar
maley-exe/scikit-learn,vermouthmjl/scikit-learn,shangwuhencc/scikit-learn,ominux/scikit-learn,ankurankan/scikit-learn,deepesch/scikit-learn,voxlol/scikit-learn,andrewnc/scikit-learn,akionakamura/scikit-learn,thilbern/scikit-learn,betatim/scikit-learn,Clyde-fare/scikit-learn,ningchi/scikit-learn,mjudsp/Tsallis,vshtanko/scikit-learn,RomainBrault/scikit-learn,JeanKossaifi/scikit-learn,lbishal/scikit-learn,JeanKossaifi/scikit-learn,eg-zhang/scikit-learn,thientu/scikit-learn,qifeigit/scikit-learn,mblondel/scikit-learn,JsNoNo/scikit-learn,samuel1208/scikit-learn,cwu2011/scikit-learn,larsmans/scikit-learn,fzalkow/scikit-learn,TomDLT/scikit-learn,ningchi/scikit-learn,ngoix/OCRF,trungnt13/scikit-learn,fabioticconi/scikit-learn,nesterione/scikit-learn,maheshakya/scikit-learn,quheng/scikit-learn,mhdella/scikit-learn,maheshakya/scikit-learn,herilalaina/scikit-learn,massmutual/scikit-learn,macks22/scikit-learn,thilbern/scikit-learn,xzh86/scikit-learn,dsullivan7/scikit-learn,glouppe/scikit-learn,shahankhatch/scikit-learn,ycaihua/scikit-learn,untom/scikit-learn,yask123/scikit-learn,smartscheduling/scikit-learn-categorical-tree,akionakamura/scikit-learn,BiaDarkia/scikit-learn,yask123/scikit-learn,schets/scikit-learn,IssamLaradji/scikit-learn,simon-pepin/scikit-learn,shahankhatch/scikit-learn,rahul-c1/scikit-learn,466152112/scikit-learn,macks22/scikit-learn,jpautom/scikit-learn,frank-tancf/scikit-learn,harshaneelhg/scikit-learn,ChanChiChoi/scikit-learn,cauchycui/scikit-learn,mehdidc/scikit-learn,jakirkham/scikit-learn,nelson-liu/scikit-learn,quheng/scikit-learn,bnaul/scikit-learn,moutai/scikit-learn,herilalaina/scikit-learn,anntzer/scikit-learn,ilyes14/scikit-learn,gotomypc/scikit-learn,LiaoPan/scikit-learn,vybstat/scikit-learn,RachitKansal/scikit-learn,ilo10/scikit-learn,thientu/scikit-learn,rajat1994/scikit-learn,Clyde-fare/scikit-learn,Jimmy-Morzaria/scikit-learn,adamgreenhall/scikit-learn,billy-inn/scikit-learn,RPGOne/scikit-learn,poryfly/scikit-learn,ndingwall/scikit-learn,yunfeilu/scikit-learn,depet/scikit-learn,jorge2703/scikit-learn,petosegan/scikit-learn,mehdidc/scikit-learn,zhenv5/scikit-learn,kmike/scikit-learn,alexsavio/scikit-learn,jblackburne/scikit-learn,RomainBrault/scikit-learn,Windy-Ground/scikit-learn,alvarofierroclavero/scikit-learn,spallavolu/scikit-learn,AnasGhrab/scikit-learn,fredhusser/scikit-learn,shangwuhencc/scikit-learn,Djabbz/scikit-learn,potash/scikit-learn,luo66/scikit-learn,aminert/scikit-learn,xyguo/scikit-learn,Jimmy-Morzaria/scikit-learn,florian-f/sklearn,pypot/scikit-learn,cdegroc/scikit-learn,nesterione/scikit-learn,billy-inn/scikit-learn,robin-lai/scikit-learn,mattilyra/scikit-learn,dsquareindia/scikit-learn,liangz0707/scikit-learn,rrohan/scikit-learn,ky822/scikit-learn,theoryno3/scikit-learn,lucidfrontier45/scikit-learn,xiaoxiamii/scikit-learn,victorbergelin/scikit-learn,jkarnows/scikit-learn,pythonvietnam/scikit-learn,carrillo/scikit-learn,massmutual/scikit-learn,jlegendary/scikit-learn,wlamond/scikit-learn,nelson-liu/scikit-learn,moutai/scikit-learn,vermouthmjl/scikit-learn,jereze/scikit-learn,fredhusser/scikit-learn,dingocuster/scikit-learn,tmhm/scikit-learn,alexsavio/scikit-learn,AlexRobson/scikit-learn,clemkoa/scikit-learn,RachitKansal/scikit-learn,krez13/scikit-learn,JosmanPS/scikit-learn,kashif/scikit-learn,sgenoud/scikit-learn,MartinSavc/scikit-learn,mxjl620/scikit-learn,bthirion/scikit-learn,f3r/scikit-learn,evgchz/scikit-learn,nrhine1/scikit-learn,manhhomienbienthuy/scikit-learn,meduz/scikit-learn,carrillo/scikit-learn,xiaoxiamii/scikit-learn,meduz/scikit-lea
rn,466152112/scikit-learn,wzbozon/scikit-learn,abimannans/scikit-learn,chrisburr/scikit-learn,mjgrav2001/scikit-learn,waterponey/scikit-learn,IssamLaradji/scikit-learn,cauchycui/scikit-learn,nesterione/scikit-learn,ankurankan/scikit-learn,Nyker510/scikit-learn,jjx02230808/project0223,hrjn/scikit-learn,trungnt13/scikit-learn,mehdidc/scikit-learn,nelson-liu/scikit-learn,wlamond/scikit-learn,henrykironde/scikit-learn,JPFrancoia/scikit-learn,joshloyal/scikit-learn,equialgo/scikit-learn,belltailjp/scikit-learn,belltailjp/scikit-learn,vigilv/scikit-learn,kagayakidan/scikit-learn,jayflo/scikit-learn,TomDLT/scikit-learn,pypot/scikit-learn,mjgrav2001/scikit-learn,jmschrei/scikit-learn,xavierwu/scikit-learn,djgagne/scikit-learn,cauchycui/scikit-learn,ChanderG/scikit-learn,mattgiguere/scikit-learn,anntzer/scikit-learn,ephes/scikit-learn,YinongLong/scikit-learn,belltailjp/scikit-learn,ndingwall/scikit-learn,etkirsch/scikit-learn,yanlend/scikit-learn,jmetzen/scikit-learn,sergeyf/scikit-learn,eickenberg/scikit-learn,kevin-intel/scikit-learn,mugizico/scikit-learn,sanketloke/scikit-learn,bikong2/scikit-learn,MartinDelzant/scikit-learn,russel1237/scikit-learn,HolgerPeters/scikit-learn,equialgo/scikit-learn,liberatorqjw/scikit-learn,RayMick/scikit-learn,espg/scikit-learn,poryfly/scikit-learn,shyamalschandra/scikit-learn,vermouthmjl/scikit-learn,joshloyal/scikit-learn,samuel1208/scikit-learn,arjoly/scikit-learn,arjoly/scikit-learn,IndraVikas/scikit-learn,tomlof/scikit-learn,ssaeger/scikit-learn,yyjiang/scikit-learn,cl4rke/scikit-learn,ChanderG/scikit-learn,clemkoa/scikit-learn,NelisVerhoef/scikit-learn,466152112/scikit-learn,dsquareindia/scikit-learn,appapantula/scikit-learn,loli/semisupervisedforests,zorojean/scikit-learn,alexeyum/scikit-learn,hsuantien/scikit-learn,RayMick/scikit-learn,ssaeger/scikit-learn,Obus/scikit-learn,hugobowne/scikit-learn,aabadie/scikit-learn,kjung/scikit-learn,sumspr/scikit-learn,robbymeals/scikit-learn,hdmetor/scikit-learn,andrewnc/scikit-learn,thilbern/scikit-learn,ogrisel/scikit-learn,shahankhatch/scikit-learn,elkingtonmcb/scikit-learn,rvraghav93/scikit-learn,rahuldhote/scikit-learn,ZenDevelopmentSystems/scikit-learn,fyffyt/scikit-learn,mblondel/scikit-learn,joshloyal/scikit-learn,TomDLT/scikit-learn,NelisVerhoef/scikit-learn,abhishekkrthakur/scikit-learn,khkaminska/scikit-learn,NunoEdgarGub1/scikit-learn,fabianp/scikit-learn,sonnyhu/scikit-learn,liyu1990/sklearn,anurag313/scikit-learn,anirudhjayaraman/scikit-learn,aetilley/scikit-learn,yyjiang/scikit-learn,ky822/scikit-learn,CVML/scikit-learn,Jimmy-Morzaria/scikit-learn,nmayorov/scikit-learn,UNR-AERIAL/scikit-learn,thientu/scikit-learn,MartinSavc/scikit-learn,rsivapr/scikit-learn,khkaminska/scikit-learn,RPGOne/scikit-learn,JosmanPS/scikit-learn,pompiduskus/scikit-learn,mattilyra/scikit-learn,jorik041/scikit-learn,jakobworldpeace/scikit-learn,hlin117/scikit-learn,mxjl620/scikit-learn,sgenoud/scikit-learn,sergeyf/scikit-learn,waterponey/scikit-learn,aetilley/scikit-learn,pompiduskus/scikit-learn,olologin/scikit-learn,anurag313/scikit-learn,yanlend/scikit-learn,siutanwong/scikit-learn,zihua/scikit-learn,untom/scikit-learn,amueller/scikit-learn,jakirkham/scikit-learn,shahankhatch/scikit-learn,davidgbe/scikit-learn,Achuth17/scikit-learn,MatthieuBizien/scikit-learn,fbagirov/scikit-learn,phdowling/scikit-learn,hrjn/scikit-learn,AlexandreAbraham/scikit-learn,appapantula/scikit-learn,IshankGulati/scikit-learn,siutanwong/scikit-learn,shikhardb/scikit-learn,zihua/scikit-learn,jmschrei/scikit-learn,lin-credible/scikit-learn,ZenDevelopmentS
ystems/scikit-learn,djgagne/scikit-learn,3manuek/scikit-learn,equialgo/scikit-learn,lucidfrontier45/scikit-learn,ndingwall/scikit-learn,fengzhyuan/scikit-learn,jorge2703/scikit-learn,mhue/scikit-learn,jseabold/scikit-learn,jorik041/scikit-learn,Barmaley-exe/scikit-learn,pv/scikit-learn,zaxtax/scikit-learn,arahuja/scikit-learn,jorik041/scikit-learn,ankurankan/scikit-learn,bnaul/scikit-learn,jakobworldpeace/scikit-learn,mhdella/scikit-learn,Titan-C/scikit-learn,AIML/scikit-learn,cl4rke/scikit-learn,anirudhjayaraman/scikit-learn,manhhomienbienthuy/scikit-learn,toastedcornflakes/scikit-learn,hdmetor/scikit-learn,terkkila/scikit-learn,HolgerPeters/scikit-learn,joernhees/scikit-learn,idlead/scikit-learn,terkkila/scikit-learn,vortex-ape/scikit-learn,quheng/scikit-learn,stylianos-kampakis/scikit-learn,yask123/scikit-learn,etkirsch/scikit-learn,zhenv5/scikit-learn,sergeyf/scikit-learn,mrshu/scikit-learn,nvoron23/scikit-learn,vivekmishra1991/scikit-learn,ivannz/scikit-learn,ahoyosid/scikit-learn,zaxtax/scikit-learn,pkruskal/scikit-learn,pnedunuri/scikit-learn,abhishekkrthakur/scikit-learn,xiaoxiamii/scikit-learn,ngoix/OCRF,lesteve/scikit-learn,Sentient07/scikit-learn,kylerbrown/scikit-learn,krez13/scikit-learn,trungnt13/scikit-learn,gotomypc/scikit-learn,mayblue9/scikit-learn,pythonvietnam/scikit-learn,DSLituiev/scikit-learn,HolgerPeters/scikit-learn,rsivapr/scikit-learn,maheshakya/scikit-learn,pkruskal/scikit-learn,shyamalschandra/scikit-learn,PatrickChrist/scikit-learn,belltailjp/scikit-learn,beepee14/scikit-learn,glouppe/scikit-learn,toastedcornflakes/scikit-learn,rishikksh20/scikit-learn,f3r/scikit-learn,akionakamura/scikit-learn,anirudhjayaraman/scikit-learn,carrillo/scikit-learn,devanshdalal/scikit-learn,vortex-ape/scikit-learn,mojoboss/scikit-learn,theoryno3/scikit-learn,kevin-intel/scikit-learn,gclenaghan/scikit-learn,wanggang3333/scikit-learn,lazywei/scikit-learn,justincassidy/scikit-learn,zuku1985/scikit-learn,yyjiang/scikit-learn,JosmanPS/scikit-learn,sarahgrogan/scikit-learn,0asa/scikit-learn,Fireblend/scikit-learn,PatrickOReilly/scikit-learn,voxlol/scikit-learn,jm-begon/scikit-learn,cl4rke/scikit-learn,kaichogami/scikit-learn,altairpearl/scikit-learn,AlexRobson/scikit-learn,sinhrks/scikit-learn,RomainBrault/scikit-learn,mhue/scikit-learn,Aasmi/scikit-learn,mayblue9/scikit-learn,liyu1990/sklearn,fabioticconi/scikit-learn,andaag/scikit-learn,ishanic/scikit-learn,heli522/scikit-learn,tosolveit/scikit-learn,rishikksh20/scikit-learn,mayblue9/scikit-learn,murali-munna/scikit-learn,mwv/scikit-learn,rvraghav93/scikit-learn,zuku1985/scikit-learn,raghavrv/scikit-learn,Myasuka/scikit-learn,huzq/scikit-learn,mfjb/scikit-learn,LohithBlaze/scikit-learn,chrsrds/scikit-learn,jseabold/scikit-learn,yanlend/scikit-learn,Adai0808/scikit-learn,tmhm/scikit-learn,pythonvietnam/scikit-learn,huobaowangxi/scikit-learn,theoryno3/scikit-learn,hugobowne/scikit-learn,NunoEdgarGub1/scikit-learn,jayflo/scikit-learn,alexeyum/scikit-learn,terkkila/scikit-learn,ky822/scikit-learn,espg/scikit-learn,ZenDevelopmentSystems/scikit-learn,Achuth17/scikit-learn,jlegendary/scikit-learn,bigdataelephants/scikit-learn,mugizico/scikit-learn,JeanKossaifi/scikit-learn,krez13/scikit-learn,fabianp/scikit-learn,shusenl/scikit-learn,icdishb/scikit-learn,mjudsp/Tsallis,etkirsch/scikit-learn,carrillo/scikit-learn,Nyker510/scikit-learn,moutai/scikit-learn,mattgiguere/scikit-learn,pratapvardhan/scikit-learn,bnaul/scikit-learn,arahuja/scikit-learn,466152112/scikit-learn,rrohan/scikit-learn,ngoix/OCRF,shenzebang/scikit-learn,kagayakidan/scikit-le
arn,PrashntS/scikit-learn,xwolf12/scikit-learn,Myasuka/scikit-learn,yanlend/scikit-learn,vybstat/scikit-learn,Akshay0724/scikit-learn,plissonf/scikit-learn,devanshdalal/scikit-learn,meduz/scikit-learn,stylianos-kampakis/scikit-learn,shenzebang/scikit-learn,fzalkow/scikit-learn,wazeerzulfikar/scikit-learn,Lawrence-Liu/scikit-learn,ldirer/scikit-learn,liangz0707/scikit-learn,sonnyhu/scikit-learn,Vimos/scikit-learn,plissonf/scikit-learn,ominux/scikit-learn,henridwyer/scikit-learn,huzq/scikit-learn,nikitasingh981/scikit-learn,AlexanderFabisch/scikit-learn,xiaoxiamii/scikit-learn,pianomania/scikit-learn,spallavolu/scikit-learn,tomlof/scikit-learn,trankmichael/scikit-learn,xyguo/scikit-learn,waterponey/scikit-learn,B3AU/waveTree,jseabold/scikit-learn,pkruskal/scikit-learn,roxyboy/scikit-learn,kashif/scikit-learn,jereze/scikit-learn,altairpearl/scikit-learn,cwu2011/scikit-learn,jorge2703/scikit-learn,fabianp/scikit-learn,JPFrancoia/scikit-learn,aflaxman/scikit-learn,andrewnc/scikit-learn,schets/scikit-learn,pompiduskus/scikit-learn,phdowling/scikit-learn,fengzhyuan/scikit-learn,Barmaley-exe/scikit-learn,ashhher3/scikit-learn,roxyboy/scikit-learn,treycausey/scikit-learn,khkaminska/scikit-learn,zaxtax/scikit-learn,q1ang/scikit-learn,justincassidy/scikit-learn,chrisburr/scikit-learn,joernhees/scikit-learn,cdegroc/scikit-learn,vibhorag/scikit-learn,jjx02230808/project0223,manashmndl/scikit-learn,q1ang/scikit-learn,krez13/scikit-learn,sinhrks/scikit-learn,kmike/scikit-learn,CVML/scikit-learn,Garrett-R/scikit-learn,florian-f/sklearn,huobaowangxi/scikit-learn,cainiaocome/scikit-learn,dingocuster/scikit-learn,CforED/Machine-Learning,AlexandreAbraham/scikit-learn,DonBeo/scikit-learn,mattilyra/scikit-learn,pypot/scikit-learn,jm-begon/scikit-learn,gclenaghan/scikit-learn,Djabbz/scikit-learn,Aasmi/scikit-learn,mblondel/scikit-learn,rahul-c1/scikit-learn,jm-begon/scikit-learn,zaxtax/scikit-learn,ngoix/OCRF,tomlof/scikit-learn,aewhatley/scikit-learn,jaidevd/scikit-learn,abimannans/scikit-learn,cauchycui/scikit-learn,toastedcornflakes/scikit-learn,victorbergelin/scikit-learn,h2educ/scikit-learn,russel1237/scikit-learn,treycausey/scikit-learn,xzh86/scikit-learn,Djabbz/scikit-learn,rvraghav93/scikit-learn,kylerbrown/scikit-learn,trungnt13/scikit-learn,ishanic/scikit-learn,phdowling/scikit-learn,vortex-ape/scikit-learn,mugizico/scikit-learn,AlexRobson/scikit-learn,lenovor/scikit-learn,PrashntS/scikit-learn,jmetzen/scikit-learn,cainiaocome/scikit-learn,btabibian/scikit-learn,ldirer/scikit-learn,B3AU/waveTree,aminert/scikit-learn,hlin117/scikit-learn,lin-credible/scikit-learn,depet/scikit-learn,anirudhjayaraman/scikit-learn,h2educ/scikit-learn,chrisburr/scikit-learn,schets/scikit-learn,mfjb/scikit-learn,MartinDelzant/scikit-learn,giorgiop/scikit-learn,heli522/scikit-learn,jayflo/scikit-learn,xuewei4d/scikit-learn,ClimbsRocks/scikit-learn,tmhm/scikit-learn,MatthieuBizien/scikit-learn,vinayak-mehta/scikit-learn,arjoly/scikit-learn,Srisai85/scikit-learn,bhargav/scikit-learn,mjudsp/Tsallis,billy-inn/scikit-learn,herilalaina/scikit-learn,hsiaoyi0504/scikit-learn,jaidevd/scikit-learn,rrohan/scikit-learn,Srisai85/scikit-learn,schets/scikit-learn,MartinDelzant/scikit-learn,aminert/scikit-learn,lucidfrontier45/scikit-learn,vshtanko/scikit-learn,nelson-liu/scikit-learn,BiaDarkia/scikit-learn,larsmans/scikit-learn,cybernet14/scikit-learn,rohanp/scikit-learn,hrjn/scikit-learn,mxjl620/scikit-learn,RomainBrault/scikit-learn,lin-credible/scikit-learn,michigraber/scikit-learn,zihua/scikit-learn,walterreade/scikit-learn,luo66/scikit-
learn,hitszxp/scikit-learn,hlin117/scikit-learn,JsNoNo/scikit-learn,untom/scikit-learn,f3r/scikit-learn,beepee14/scikit-learn,andrewnc/scikit-learn,RachitKansal/scikit-learn,kmike/scikit-learn,fzalkow/scikit-learn,ElDeveloper/scikit-learn,RayMick/scikit-learn,chrisburr/scikit-learn,Titan-C/scikit-learn,ominux/scikit-learn,rsivapr/scikit-learn,trankmichael/scikit-learn,kevin-intel/scikit-learn,lbishal/scikit-learn,mehdidc/scikit-learn,Garrett-R/scikit-learn,kashif/scikit-learn,dhruv13J/scikit-learn,jkarnows/scikit-learn,LiaoPan/scikit-learn,simon-pepin/scikit-learn,nikitasingh981/scikit-learn,shusenl/scikit-learn,fzalkow/scikit-learn,jzt5132/scikit-learn,nmayorov/scikit-learn,rajat1994/scikit-learn,h2educ/scikit-learn,huobaowangxi/scikit-learn,ycaihua/scikit-learn,sergeyf/scikit-learn,xubenben/scikit-learn,glemaitre/scikit-learn,nhejazi/scikit-learn,Garrett-R/scikit-learn,abhishekgahlot/scikit-learn,jlegendary/scikit-learn,yyjiang/scikit-learn,raghavrv/scikit-learn,mjgrav2001/scikit-learn,nomadcube/scikit-learn,loli/sklearn-ensembletrees,madjelan/scikit-learn,tomlof/scikit-learn,fredhusser/scikit-learn,MohammedWasim/scikit-learn,betatim/scikit-learn,deepesch/scikit-learn,mrshu/scikit-learn,mayblue9/scikit-learn,deepesch/scikit-learn,bthirion/scikit-learn,0asa/scikit-learn,michigraber/scikit-learn,IssamLaradji/scikit-learn,iismd17/scikit-learn,liberatorqjw/scikit-learn,pkruskal/scikit-learn,mattgiguere/scikit-learn,YinongLong/scikit-learn,djgagne/scikit-learn,Srisai85/scikit-learn,dingocuster/scikit-learn,DonBeo/scikit-learn,alexeyum/scikit-learn,saiwing-yeung/scikit-learn,bhargav/scikit-learn,OshynSong/scikit-learn,mxjl620/scikit-learn,jpautom/scikit-learn,nomadcube/scikit-learn,scikit-learn/scikit-learn,jmetzen/scikit-learn,beepee14/scikit-learn,mikebenfield/scikit-learn,r-mart/scikit-learn,Sentient07/scikit-learn,AIML/scikit-learn,ahoyosid/scikit-learn,frank-tancf/scikit-learn,aflaxman/scikit-learn,xuewei4d/scikit-learn,giorgiop/scikit-learn,IssamLaradji/scikit-learn,0x0all/scikit-learn,ClimbsRocks/scikit-learn,arahuja/scikit-learn,anurag313/scikit-learn,lazywei/scikit-learn,samzhang111/scikit-learn,ElDeveloper/scikit-learn,AlexanderFabisch/scikit-learn,ningchi/scikit-learn,aflaxman/scikit-learn,amueller/scikit-learn,ilyes14/scikit-learn,hsuantien/scikit-learn,Achuth17/scikit-learn,theoryno3/scikit-learn,B3AU/waveTree,procoder317/scikit-learn,CforED/Machine-Learning,huzq/scikit-learn,procoder317/scikit-learn,UNR-AERIAL/scikit-learn,Vimos/scikit-learn,r-mart/scikit-learn,jlegendary/scikit-learn,costypetrisor/scikit-learn,jzt5132/scikit-learn,xwolf12/scikit-learn,iismd17/scikit-learn,smartscheduling/scikit-learn-categorical-tree,DonBeo/scikit-learn,stylianos-kampakis/scikit-learn,jorge2703/scikit-learn,nhejazi/scikit-learn,huobaowangxi/scikit-learn,mrshu/scikit-learn,AlexandreAbraham/scikit-learn,potash/scikit-learn,treycausey/scikit-learn,alexsavio/scikit-learn,mojoboss/scikit-learn,jakobworldpeace/scikit-learn,dhruv13J/scikit-learn,siutanwong/scikit-learn,ChanderG/scikit-learn,Akshay0724/scikit-learn,ephes/scikit-learn,MohammedWasim/scikit-learn,MohammedWasim/scikit-learn,yonglehou/scikit-learn,ltiao/scikit-learn,simon-pepin/scikit-learn,IndraVikas/scikit-learn,shikhardb/scikit-learn,siutanwong/scikit-learn,AnasGhrab/scikit-learn,kmike/scikit-learn,Clyde-fare/scikit-learn,saiwing-yeung/scikit-learn,xwolf12/scikit-learn,Clyde-fare/scikit-learn,bikong2/scikit-learn,cdegroc/scikit-learn,lesteve/scikit-learn,eickenberg/scikit-learn,jblackburne/scikit-learn,xuewei4d/scikit-learn,amueller/scikit
-learn,michigraber/scikit-learn,mugizico/scikit-learn,luo66/scikit-learn,procoder317/scikit-learn,AlexanderFabisch/scikit-learn,r-mart/scikit-learn,joshloyal/scikit-learn,ilo10/scikit-learn,jmetzen/scikit-learn,cybernet14/scikit-learn,hainm/scikit-learn,rahuldhote/scikit-learn,lesteve/scikit-learn,ephes/scikit-learn,zorojean/scikit-learn,mrshu/scikit-learn,MatthieuBizien/scikit-learn,gotomypc/scikit-learn,PatrickChrist/scikit-learn,ltiao/scikit-learn,shikhardb/scikit-learn,xyguo/scikit-learn,betatim/scikit-learn,hainm/scikit-learn,fbagirov/scikit-learn,aabadie/scikit-learn,Obus/scikit-learn,zuku1985/scikit-learn,ngoix/OCRF,harshaneelhg/scikit-learn,jblackburne/scikit-learn,NelisVerhoef/scikit-learn,tdhopper/scikit-learn,hitszxp/scikit-learn,bikong2/scikit-learn,Myasuka/scikit-learn,qifeigit/scikit-learn,costypetrisor/scikit-learn,robin-lai/scikit-learn,ssaeger/scikit-learn,tawsifkhan/scikit-learn,liyu1990/sklearn,bhargav/scikit-learn,walterreade/scikit-learn,vivekmishra1991/scikit-learn,ashhher3/scikit-learn,pnedunuri/scikit-learn,heli522/scikit-learn,dsquareindia/scikit-learn,robbymeals/scikit-learn,nhejazi/scikit-learn,iismd17/scikit-learn,MatthieuBizien/scikit-learn,andaag/scikit-learn,sgenoud/scikit-learn,raghavrv/scikit-learn,pv/scikit-learn,djgagne/scikit-learn,ishanic/scikit-learn,loli/sklearn-ensembletrees,sonnyhu/scikit-learn,vigilv/scikit-learn,bhargav/scikit-learn,plissonf/scikit-learn,zorroblue/scikit-learn,jakirkham/scikit-learn,lazywei/scikit-learn,glouppe/scikit-learn,fbagirov/scikit-learn,xyguo/scikit-learn,ChanChiChoi/scikit-learn,3manuek/scikit-learn,glemaitre/scikit-learn,Akshay0724/scikit-learn,mrshu/scikit-learn,wanggang3333/scikit-learn,raghavrv/scikit-learn,harshaneelhg/scikit-learn,cainiaocome/scikit-learn,vybstat/scikit-learn,AnasGhrab/scikit-learn,treycausey/scikit-learn,mhue/scikit-learn,OshynSong/scikit-learn,rohanp/scikit-learn
--- +++ @@ -1,4 +1,4 @@ -from nose.tools import assert_equals +import numpy as np from .. import datasets from ..pca import PCA @@ -14,8 +14,8 @@ pca = PCA(k=2) X_r = pca.fit(X).transform(X) - assert_equals(X_r.shape[1], 2) + np.testing.assert_equal(X_r.shape[1], 2) pca = PCA() pca.fit(X) - assert_equals(pca.explained_variance_.sum(), 1.0) + np.testing.assert_almost_equal(pca.explained_variance_.sum(), 1.0, 3)
aeec346bf49f9f297802a4c6c50cf28de20a70f8
examples/load.py
examples/load.py
# coding: utf-8

import os

import requests

ROOT = os.path.dirname(os.path.realpath(__file__))
ENDPOINT = os.environ.get('ES_ENDPOINT_EXTERNAL', 'localhost:9200')
INDEX = 'gsiCrawler'

eid = 0

with open(os.path.join(ROOT, 'blogPosting.txt'), 'r') as f:
    for line in f:
        url = 'http://{}/{}/{}/{}'.format(ENDPOINT, INDEX, "twitter", eid)
        requests.put(url, data=line)
        eid += 1

with open(os.path.join(ROOT, 'comments-ld.txt'), 'r') as f:
    for line in f:
        url = 'http://{}/{}/{}/{}'.format(ENDPOINT, INDEX, "reddit", eid)
        requests.put(url, data=line)
        eid += 1
# coding: utf-8

import os

import requests

ROOT = os.path.dirname(os.path.realpath(__file__))
ENDPOINT = os.environ.get('ES_ENDPOINT_EXTERNAL', 'localhost:9200')
INDEX = 'gsiCrawler'

eid = 0

with open(os.path.join(ROOT, 'blogPosting.txt'), 'r') as f:
    for line in f:
        url = 'http://{}/{}/{}/{}'.format(ENDPOINT, INDEX, "twitter", eid)
        requests.put(url, data=line, headers={'Content-Type': 'application/json'})
        eid += 1

with open(os.path.join(ROOT, 'comments-ld.txt'), 'r') as f:
    for line in f:
        url = 'http://{}/{}/{}/{}'.format(ENDPOINT, INDEX, "reddit", eid)
        requests.put(url, data=line, headers={'Content-Type': 'application/json'})
        eid += 1
Add content-type to requests in example
Add content-type to requests in example
Python
apache-2.0
gsi-upm/gsicrawler,gsi-upm/gsicrawler,gsi-upm/gsicrawler,gsi-upm/gsicrawler
--- +++ @@ -13,11 +13,11 @@ with open(os.path.join(ROOT, 'blogPosting.txt'), 'r') as f: for line in f: url = 'http://{}/{}/{}/{}'.format(ENDPOINT, INDEX, "twitter", eid) - requests.put(url, data=line) + requests.put(url, data=line, headers={'Content-Type': 'application/json'}) eid += 1 with open(os.path.join(ROOT, 'comments-ld.txt'), 'r') as f: for line in f: url = 'http://{}/{}/{}/{}'.format(ENDPOINT, INDEX, "reddit", eid) - requests.put(url, data=line) + requests.put(url, data=line, headers={'Content-Type': 'application/json'}) eid += 1
4ca6d139139a08151f7cdf89993ded3440287a4a
keyform/urls.py
keyform/urls.py
from django.conf.urls import url, include
from django.contrib import admin
from django.contrib.auth.views import login, logout_then_login
from keyform import views

urlpatterns = [
    url(r'^$', views.HomeView.as_view(), name='home'),
    url(r'^contact$', views.ContactView.as_view(), name='contact'),
    url(r'^edit-contact/(?P<pk>\d+)$', views.EditContactView.as_view(), name='edit-contact'),
    url(r'^create-contact$', views.NewContactView.as_view(), name='create-contact'),
    url(r'^edit-request/(?P<pk>\d+)$', views.RequestView.as_view(), name='edit-request'),
    url(r'^create$', views.KeyRequest.as_view(), name='create'),
    url(r'^add-comment$', views.RequestCommentView.as_view(), name='add-comment'),
    url(r'^login$', login, name='login', kwargs={'template_name': 'keyform/login.html'}),
    url(r'^logout$', logout_then_login, name='logout'),
]
from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic import RedirectView
from django.contrib.auth.views import login, logout_then_login
from keyform import views

urlpatterns = [
    url(r'^$', views.HomeView.as_view(), name='home'),
    url(r'^table.php$', RedirectView.as_view(pattern_name='home', permanent=True)),
    url(r'^contact$', views.ContactView.as_view(), name='contact'),
    url(r'^edit-contact/(?P<pk>\d+)$', views.EditContactView.as_view(), name='edit-contact'),
    url(r'^create-contact$', views.NewContactView.as_view(), name='create-contact'),
    url(r'^edit-request/(?P<pk>\d+)$', views.RequestView.as_view(), name='edit-request'),
    url(r'^create$', views.KeyRequest.as_view(), name='create'),
    url(r'^add-comment$', views.RequestCommentView.as_view(), name='add-comment'),
    url(r'^login$', login, name='login', kwargs={'template_name': 'keyform/login.html'}),
    url(r'^logout$', logout_then_login, name='logout'),
]
Add redirect for old hotlinks
Add redirect for old hotlinks
Python
mit
mostateresnet/keyformproject,mostateresnet/keyformproject,mostateresnet/keyformproject
--- +++ @@ -1,10 +1,12 @@ from django.conf.urls import url, include from django.contrib import admin +from django.views.generic import RedirectView from django.contrib.auth.views import login, logout_then_login from keyform import views urlpatterns = [ url(r'^$', views.HomeView.as_view(), name='home'), + url(r'^table.php$', RedirectView.as_view(pattern_name='home', permanent=True)), url(r'^contact$', views.ContactView.as_view(), name='contact'), url(r'^edit-contact/(?P<pk>\d+)$', views.EditContactView.as_view(), name='edit-contact'), url(r'^create-contact$', views.NewContactView.as_view(), name='create-contact'),
1f1c8eed6a60945a404aa0efd6169687431c87d5
exec_thread_1.py
exec_thread_1.py
import spam

#Convert the LTA file to the UVFITS format

spam.convert_lta_to_uvfits('Name of the file')

spam.precalibrate_targets('Name of UVFITS output file')

spam.process_target()
import spam

#Convert the LTA file to the UVFITS format
#Generates UVFITS file with same basename as LTA file
spam.convert_lta_to_uvfits('Name of the file')

#Take generated UVFITS file as input and precalibrate targets
#Generates files (RRLL with the name of the source (can be obtained using ltahdr)
spam.precalibrate_targets('Name of UVFITS output file')

#Take the generated RRLL UVFITS file and process to generate the image
#Generates final image <source name>.SP2B.PBCOR.FITS
#Also generates log file spam_<source name>_<start date>_start_time>.log in
#datfil dir
spam.process_target()
Add pipeline flow (in comments) to thread template
Add pipeline flow (in comments) to thread template
Python
mit
NCRA-TIFR/gadpu,NCRA-TIFR/gadpu
--- +++ @@ -1,9 +1,15 @@ import spam #Convert the LTA file to the UVFITS format - +#Generates UVFITS file with same basename as LTA file spam.convert_lta_to_uvfits('Name of the file') +#Take generated UVFITS file as input and precalibrate targets +#Generates files (RRLL with the name of the source (can be obtained using ltahdr) spam.precalibrate_targets('Name of UVFITS output file') +#Take the generated RRLL UVFITS file and process to generate the image +#Generates final image <source name>.SP2B.PBCOR.FITS +#Also generates log file spam_<source name>_<start date>_start_time>.log in +#datfil dir spam.process_target()
89a0edf7e5e00de68615574b2044f593e0339f2e
jsonrpc/views.py
jsonrpc/views.py
from _json import dumps
from django.http import HttpResponse
from django.shortcuts import render_to_response
from jsonrpc.site import jsonrpc_site
from jsonrpc import mochikit

def browse(request):
    if (request.GET.get('f', None) == 'mochikit.js'):
        return HttpResponse(mochikit.mochikit, content_type='application/x-javascript')
    if (request.GET.get('f', None) == 'interpreter.js'):
        return HttpResponse(mochikit.interpreter, content_type='application/x-javascript')
    desc = jsonrpc_site.service_desc()
    return render_to_response('browse.html', {
        'methods': desc['procs'],
        'method_names_str': dumps(
            [m['name'] for m in desc['procs']])
    })
from _json import dumps
from django.http import HttpResponse
from django.shortcuts import render_to_response
from jsonrpc.site import jsonrpc_site
from jsonrpc import mochikit

def browse(request, site=jsonrpc_site):
    if (request.GET.get('f', None) == 'mochikit.js'):
        return HttpResponse(mochikit.mochikit, content_type='application/x-javascript')
    if (request.GET.get('f', None) == 'interpreter.js'):
        return HttpResponse(mochikit.interpreter, content_type='application/x-javascript')
    desc = site.service_desc()
    return render_to_response('browse.html', {
        'methods': desc['procs'],
        'method_names_str': dumps(
            [m['name'] for m in desc['procs']])
    })
Make browse work with non-default sites
Make browse work with non-default sites
Python
mit
palfrey/django-json-rpc
--- +++ @@ -4,12 +4,12 @@ from jsonrpc.site import jsonrpc_site from jsonrpc import mochikit -def browse(request): +def browse(request, site=jsonrpc_site): if (request.GET.get('f', None) == 'mochikit.js'): return HttpResponse(mochikit.mochikit, content_type='application/x-javascript') if (request.GET.get('f', None) == 'interpreter.js'): return HttpResponse(mochikit.interpreter, content_type='application/x-javascript') - desc = jsonrpc_site.service_desc() + desc = site.service_desc() return render_to_response('browse.html', { 'methods': desc['procs'], 'method_names_str': dumps(
7fd0c08926e9e4e24df2afe047625b3ceb651a02
examples/sponza/effect.py
examples/sponza/effect.py
import moderngl as mgl
from demosys.effects import effect


class SceneEffect(effect.Effect):
    """Generated default effect"""

    def __init__(self):
        self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
        self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)

    @effect.bind_target
    def draw(self, time, frametime, target):
        self.ctx.enable(mgl.DEPTH_TEST)
        self.sys_camera.velocity = self.scene.diagonal_size / 5.0

        self.scene.draw(
            projection_matrix=self.proj_mat,
            camera_matrix=self.sys_camera.view_matrix,
            time=time,
        )

        # Draw bbox
        self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
import moderngl as mgl
from demosys.effects import effect


class SceneEffect(effect.Effect):
    """Generated default effect"""

    def __init__(self):
        self.scene = self.get_scene("Sponza/glTF/Sponza.gltf", local=True)
        self.proj_mat = self.create_projection(fov=75.0, near=0.01, far=1000.0)

    @effect.bind_target
    def draw(self, time, frametime, target):
        self.ctx.enable(mgl.DEPTH_TEST)
        self.sys_camera.velocity = self.scene.diagonal_size / 5.0

        self.scene.draw(
            projection_matrix=self.proj_mat,
            camera_matrix=self.sys_camera.view_matrix,
            time=time,
        )

        # Draw bbox
        # self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
Disable bbox draw in sponza example
Disable bbox draw in sponza example
Python
isc
Contraz/demosys-py
--- +++ @@ -21,4 +21,4 @@ ) # Draw bbox - self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True) + # self.scene.draw_bbox(self.proj_mat, self.sys_camera.view_matrix, all=True)
5e969205ab1840aaa83008ce8ef8600d40743eec
neutron/objects/stdattrs.py
neutron/objects/stdattrs.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_versionedobjects import fields as obj_fields

from neutron.db import standard_attr
from neutron.objects import base
from neutron.objects.extensions import standardattributes as stdattr_obj


# TODO(ihrachys): add unit tests for the object

@base.NeutronObjectRegistry.register
class StandardAttribute(base.NeutronDbObject):
    # Version 1.0: Initial version
    VERSION = '1.0'

    db_model = standard_attr.StandardAttribute

    fields = {
        'id': obj_fields.IntegerField(),
        'resource_type': obj_fields.StringField(),
    }

    fields.update(stdattr_obj.STANDARD_ATTRIBUTES)
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_versionedobjects import fields as obj_fields

from neutron.db import standard_attr
from neutron.objects import base
from neutron.objects.extensions import standardattributes as stdattr_obj


# TODO(ihrachys): add unit tests for the object

@base.NeutronObjectRegistry.register
class StandardAttribute(base.NeutronDbObject):
    # Version 1.0: Initial version
    VERSION = '1.0'

    new_facade = True

    db_model = standard_attr.StandardAttribute

    fields = {
        'id': obj_fields.IntegerField(),
        'resource_type': obj_fields.StringField(),
    }

    fields.update(stdattr_obj.STANDARD_ATTRIBUTES)
Switch to new engine facade for StandardAttribute objects
Switch to new engine facade for StandardAttribute objects Enable the new Engine Facade for StandardAttribute objects. Change-Id: Ia3eb436d07e3b2fc633b219aa00c78cc07ed30db
Python
apache-2.0
mahak/neutron,openstack/neutron,openstack/neutron,openstack/neutron,mahak/neutron,mahak/neutron
--- +++ @@ -24,6 +24,8 @@ # Version 1.0: Initial version VERSION = '1.0' + new_facade = True + db_model = standard_attr.StandardAttribute fields = {
9faf5f090239d80a79c426de83c7a0025eb08ea5
src/sentry/options/defaults.py
src/sentry/options/defaults.py
""" sentry.options.defaults ~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import, print_function from sentry.options import register, FLAG_NOSTORE, FLAG_REQUIRED, FLAG_PRIORITIZE_DISK register('cache.backend', flags=FLAG_NOSTORE) register('cache.options', default={}, flags=FLAG_NOSTORE) register('system.admin-email', flags=FLAG_REQUIRED) register('system.databases', default={}, flags=FLAG_NOSTORE) register('system.debug', default=False, flags=FLAG_NOSTORE) register('system.rate-limit', default=0, type=int) register('system.secret-key', flags=FLAG_NOSTORE) register('redis.options', default={}, flags=FLAG_NOSTORE) # Absolute URL to the sentry root directory. Should not include a trailing slash. register('system.url-prefix', ttl=60, grace=3600, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
""" sentry.options.defaults ~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import, print_function from sentry.options import register, FLAG_NOSTORE, FLAG_REQUIRED, FLAG_PRIORITIZE_DISK register('cache.backend', flags=FLAG_NOSTORE) register('cache.options', default={}, flags=FLAG_NOSTORE) register('system.admin-email', flags=FLAG_REQUIRED) register('system.databases', default={}, flags=FLAG_NOSTORE) register('system.debug', default=False, flags=FLAG_NOSTORE) register('system.rate-limit', default=0, type=int, flags=FLAG_PRIORITIZE_DISK) register('system.secret-key', flags=FLAG_NOSTORE) register('redis.options', default={}, flags=FLAG_NOSTORE) # Absolute URL to the sentry root directory. Should not include a trailing slash. register('system.url-prefix', ttl=60, grace=3600, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
Change system.rate-limit to prioritize disk
Change system.rate-limit to prioritize disk
Python
bsd-3-clause
ifduyue/sentry,ifduyue/sentry,mvaled/sentry,BuildingLink/sentry,JamesMura/sentry,looker/sentry,mvaled/sentry,JamesMura/sentry,ifduyue/sentry,looker/sentry,JamesMura/sentry,gencer/sentry,JackDanger/sentry,looker/sentry,fotinakis/sentry,beeftornado/sentry,fotinakis/sentry,BuildingLink/sentry,JamesMura/sentry,mvaled/sentry,BuildingLink/sentry,jean/sentry,beeftornado/sentry,zenefits/sentry,nicholasserra/sentry,daevaorn/sentry,mitsuhiko/sentry,fotinakis/sentry,gencer/sentry,JackDanger/sentry,fotinakis/sentry,alexm92/sentry,looker/sentry,gencer/sentry,daevaorn/sentry,BuildingLink/sentry,alexm92/sentry,mvaled/sentry,jean/sentry,mvaled/sentry,JamesMura/sentry,zenefits/sentry,zenefits/sentry,jean/sentry,JackDanger/sentry,ifduyue/sentry,ifduyue/sentry,alexm92/sentry,zenefits/sentry,beeftornado/sentry,mitsuhiko/sentry,daevaorn/sentry,nicholasserra/sentry,BuildingLink/sentry,daevaorn/sentry,gencer/sentry,nicholasserra/sentry,mvaled/sentry,jean/sentry,zenefits/sentry,looker/sentry,jean/sentry,gencer/sentry
--- +++ @@ -16,7 +16,7 @@ register('system.admin-email', flags=FLAG_REQUIRED) register('system.databases', default={}, flags=FLAG_NOSTORE) register('system.debug', default=False, flags=FLAG_NOSTORE) -register('system.rate-limit', default=0, type=int) +register('system.rate-limit', default=0, type=int, flags=FLAG_PRIORITIZE_DISK) register('system.secret-key', flags=FLAG_NOSTORE) register('redis.options', default={}, flags=FLAG_NOSTORE)
b0916a35dc0049105acb3b2b62a579353e57d33a
erpnext/accounts/doctype/bank/bank_dashboard.py
erpnext/accounts/doctype/bank/bank_dashboard.py
from __future__ import unicode_literals

from frappe import _


def get_data():
    return {
        'fieldname': 'bank',
        'transactions': [
            {
                'label': _('Bank Deatils'),
                'items': ['Bank Account', 'Bank Guarantee']
            },
            {
                'items': ['Payment Order']
            }
        ]
    }
from __future__ import unicode_literals

from frappe import _


def get_data():
    return {
        'fieldname': 'bank',
        'transactions': [
            {
                'label': _('Bank Deatils'),
                'items': ['Bank Account', 'Bank Guarantee']
            }
        ]
    }
Remove payment order from bank dashboard
fix: Remove payment order from bank dashboard
Python
agpl-3.0
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
--- +++ @@ -10,9 +10,6 @@ { 'label': _('Bank Deatils'), 'items': ['Bank Account', 'Bank Guarantee'] - }, - { - 'items': ['Payment Order'] } ] }
bf66f0f267b6bca16241ed4920199dfa4128bd5c
social_core/backends/surveymonkey.py
social_core/backends/surveymonkey.py
""" SurveyMonkey OAuth2 backend, docs at: https://developer.surveymonkey.com/api/v3/#authentication """ from .oauth import BaseOAuth2 class SurveyMonkeyOAuth2(BaseOAuth2): """SurveyMonkey OAuth2 authentication backend""" name = 'surveymonkey' AUTHORIZATION_URL = 'https://api.surveymonkey.com/oauth/authorize' ACCESS_TOKEN_URL = 'https://api.surveymonkey.com/oauth/token' ACCESS_TOKEN_METHOD = 'POST' USER_DATA_URL = '/v3/users/me' EXTRA_DATA = [ ('access_url', 'access_url'), ] def get_user_details(self, response): """Return user details from a SurveyMonkey /users/me response""" response["name"] = response['first_name'] + ' ' + response['last_name'] return response def user_data(self, access_token, *args, **kwargs): """Loads user data information from service""" base_url = kwargs["response"]["access_url"] return self.get_json(base_url + self.USER_DATA_URL, headers={ 'Authorization': 'bearer ' + access_token })
""" SurveyMonkey OAuth2 backend, docs at: https://developer.surveymonkey.com/api/v3/#authentication """ from .oauth import BaseOAuth2 class SurveyMonkeyOAuth2(BaseOAuth2): """SurveyMonkey OAuth2 authentication backend""" name = 'surveymonkey' AUTHORIZATION_URL = 'https://api.surveymonkey.com/oauth/authorize' ACCESS_TOKEN_URL = 'https://api.surveymonkey.com/oauth/token' ACCESS_TOKEN_METHOD = 'POST' USER_DATA_URL = '/v3/users/me' STATE_PARAMETER = False REDIRECT_STATE = False EXTRA_DATA = [ ('access_url', 'access_url'), ] def get_user_details(self, response): """Return user details from a SurveyMonkey /users/me response""" response["name"] = response['first_name'] + ' ' + response['last_name'] return response def user_data(self, access_token, *args, **kwargs): """Loads user data information from service""" base_url = kwargs["response"]["access_url"] return self.get_json(base_url + self.USER_DATA_URL, headers={ 'Authorization': 'bearer ' + access_token })
Disable the STATE parameter - it doesn't play nice with the SurveyMonkey App Directory links
Disable the STATE parameter - it doesn't play nice with the SurveyMonkey App Directory links
Python
bsd-3-clause
python-social-auth/social-core,python-social-auth/social-core
--- +++ @@ -12,6 +12,8 @@ ACCESS_TOKEN_URL = 'https://api.surveymonkey.com/oauth/token' ACCESS_TOKEN_METHOD = 'POST' USER_DATA_URL = '/v3/users/me' + STATE_PARAMETER = False + REDIRECT_STATE = False EXTRA_DATA = [ ('access_url', 'access_url'), ]
49602d0abfe93a0c98f55d932e7b86ddf2a59d38
connect.py
connect.py
import ConfigParser import threading import time import chatbot def runbot(t): config = ConfigParser.ConfigParser() config.readfp(open('./config.ini')) ws = chatbot.Chatbot(config.get('Chatbot', 'server'), protocols=['http-only', 'chat']) try: ws.connect() ws.run_forever() except Exception as e: print 'Exception: {0}\nArguments:\n{1!r}'.format(type(e).__name__, e.args) print 'Unable to connect. Timing out for %s seconds...' % t time.sleep(t) runbot(t+2) if __name__ == '__main__': while True: try: if 'chatbot' not in [x.name for x in threading.enumerate()]: bot = threading.Thread(target=runbot, name='chatbot', args=([2])) # Entire program exits when there are only daemon threads bot.daemon = True bot.start() time.sleep(10) except (KeyboardInterrupt, SystemExit): # Entire program will exit, since MainThread is the only non-daemon thread # The sole purpose of this is so CTRL+C etc. will close the whole program exit()
import ConfigParser import threading import time import chatbot def runbot(t): config = ConfigParser.ConfigParser() config.readfp(open('./config.ini')) ws = chatbot.Chatbot(config.get('Chatbot', 'server'), protocols=['http-only', 'chat']) try: ws.connect() ws.run_forever() except Exception as e: print 'Exception: {0}\nArguments:\n{1!r}'.format(type(e).__name__, e.args) print 'Unable to connect. Timing out for %s seconds...' % t time.sleep(t) runbot(t+2) if __name__ == '__main__': while True: try: if 'chatbot' not in [x.name for x in threading.enumerate()]: time.sleep(10) bot = threading.Thread(target=runbot, name='chatbot', args=([2])) # Entire program exits when there are only daemon threads bot.daemon = True bot.start() except (KeyboardInterrupt, SystemExit): # Entire program will exit, since MainThread is the only non-daemon thread # The sole purpose of this is so CTRL+C etc. will close the whole program exit()
Add sleep before new Chatbot instance is created on crash
Add sleep before new Chatbot instance is created on crash
Python
mit
ScottehMax/pyMon,ScottehMax/pyMon,lc-guy/pyMon,lc-guy/pyMon
--- +++ @@ -24,11 +24,11 @@ while True: try: if 'chatbot' not in [x.name for x in threading.enumerate()]: + time.sleep(10) bot = threading.Thread(target=runbot, name='chatbot', args=([2])) # Entire program exits when there are only daemon threads bot.daemon = True bot.start() - time.sleep(10) except (KeyboardInterrupt, SystemExit): # Entire program will exit, since MainThread is the only non-daemon thread # The sole purpose of this is so CTRL+C etc. will close the whole program
ee81f71d7c6b311ee760b42ca5c9b7e80f44a8d7
src/pip/_internal/metadata/importlib/_compat.py
src/pip/_internal/metadata/importlib/_compat.py
import importlib.metadata from typing import Optional, Protocol class BasePath(Protocol): """A protocol that various path objects conform. This exists because importlib.metadata uses both ``pathlib.Path`` and ``zipfile.Path``, and we need a common base for type hints (Union does not work well since ``zipfile.Path`` is too new for our linter setup). This does not mean to be exhaustive, but only contains things that present in both classes *that we need*. """ name: str @property def parent(self) -> "BasePath": raise NotImplementedError() def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]: """Find the path to the distribution's metadata directory. HACK: This relies on importlib.metadata's private ``_path`` attribute. Not all distributions exist on disk, so importlib.metadata is correct to not expose the attribute as public. But pip's code base is old and not as clean, so we do this to avoid having to rewrite too many things. Hopefully we can eliminate this some day. """ return getattr(d, "_path", None) def get_dist_name(dist: importlib.metadata.Distribution) -> str: """Get the distribution's project name. The ``name`` attribute is only available in Python 3.10 or later. We are targeting exactly that, but Mypy does not know this. """ return dist.name # type: ignore[attr-defined]
import importlib.metadata from typing import Any, Optional, Protocol, cast class BasePath(Protocol): """A protocol that various path objects conform. This exists because importlib.metadata uses both ``pathlib.Path`` and ``zipfile.Path``, and we need a common base for type hints (Union does not work well since ``zipfile.Path`` is too new for our linter setup). This does not mean to be exhaustive, but only contains things that present in both classes *that we need*. """ name: str @property def parent(self) -> "BasePath": raise NotImplementedError() def get_info_location(d: importlib.metadata.Distribution) -> Optional[BasePath]: """Find the path to the distribution's metadata directory. HACK: This relies on importlib.metadata's private ``_path`` attribute. Not all distributions exist on disk, so importlib.metadata is correct to not expose the attribute as public. But pip's code base is old and not as clean, so we do this to avoid having to rewrite too many things. Hopefully we can eliminate this some day. """ return getattr(d, "_path", None) def get_dist_name(dist: importlib.metadata.Distribution) -> str: """Get the distribution's project name. The ``name`` attribute is only available in Python 3.10 or later. We are targeting exactly that, but Mypy does not know this. """ return cast(Any, dist).name
Make version hack more reliable
Make version hack more reliable
Python
mit
pradyunsg/pip,pypa/pip,pradyunsg/pip,pypa/pip,sbidoul/pip,sbidoul/pip,pfmoore/pip,pfmoore/pip
--- +++ @@ -1,5 +1,5 @@ import importlib.metadata -from typing import Optional, Protocol +from typing import Any, Optional, Protocol, cast class BasePath(Protocol): @@ -38,4 +38,4 @@ The ``name`` attribute is only available in Python 3.10 or later. We are targeting exactly that, but Mypy does not know this. """ - return dist.name # type: ignore[attr-defined] + return cast(Any, dist).name
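The change above replaces an inline "# type: ignore[attr-defined]" comment with typing.cast. A minimal stand-alone sketch of that pattern, outside of pip and with a hypothetical LegacyDist class, assuming nothing beyond the standard typing module:

from typing import Any, cast


class LegacyDist:
    # Hypothetical stand-in for an object whose "name" attribute exists at
    # runtime but may be missing from older type stubs.
    name = "example-project"


dist = LegacyDist()

# An inline "# type: ignore[attr-defined]" is needed with old stubs but can be
# flagged as unused (e.g. under mypy's warn_unused_ignores) once the attribute
# is known to the checker; cast(Any, ...) reads the attribute either way.
project_name = cast(Any, dist).name

The trade-off is that the cast disables all attribute checking on that one expression, which is presumably acceptable here because the attribute is guaranteed on the Python versions being targeted.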
2625b539a05156fe3baea1ebf195d242740b599d
osfclient/models/storage.py
osfclient/models/storage.py
from .core import OSFCore from .file import File class Storage(OSFCore): def _update_attributes(self, storage): if not storage: return # XXX does this happen? if 'data' in storage: storage = storage['data'] self.id = self._get_attribute(storage, 'id') self.path = self._get_attribute(storage, 'attributes', 'path') self.name = self._get_attribute(storage, 'attributes', 'name') self.node = self._get_attribute(storage, 'attributes', 'node') self.provider = self._get_attribute(storage, 'attributes', 'provider') files = ['relationships', 'files', 'links', 'related', 'href'] self._files_url = self._get_attribute(storage, *files) def __str__(self): return '<Storage [{0}]>'.format(self.id) @property def files(self): """Iterate over all files in this storage""" files = self._follow_next(self._files_url) while files: file = files.pop() kind = self._get_attribute(file, 'attributes', 'kind') if kind == 'file': yield File(file) else: sub_dir_url = ('relationships', 'files', 'links', 'related', 'href') url = self._get_attribute(file, *sub_dir_url) files.extend(self._follow_next(url))
from .core import OSFCore from .file import File class Storage(OSFCore): def _update_attributes(self, storage): if not storage: return # XXX does this happen? if 'data' in storage: storage = storage['data'] self.id = self._get_attribute(storage, 'id') self.path = self._get_attribute(storage, 'attributes', 'path') self.name = self._get_attribute(storage, 'attributes', 'name') self.node = self._get_attribute(storage, 'attributes', 'node') self.provider = self._get_attribute(storage, 'attributes', 'provider') self._files_key = ('relationships', 'files', 'links', 'related', 'href') self._files_url = self._get_attribute(storage, *self._files_key) def __str__(self): return '<Storage [{0}]>'.format(self.id) @property def files(self): """Iterate over all files in this storage""" files = self._follow_next(self._files_url) while files: file = files.pop() kind = self._get_attribute(file, 'attributes', 'kind') if kind == 'file': yield File(file) else: # recurse into a folder and add entries to `files` url = self._get_attribute(file, *self._files_key) files.extend(self._follow_next(url))
Refactor key to access files from a folder's JSON
Refactor key to access files from a folder's JSON
Python
bsd-3-clause
betatim/osf-cli,betatim/osf-cli
--- +++ @@ -18,8 +18,9 @@ self.node = self._get_attribute(storage, 'attributes', 'node') self.provider = self._get_attribute(storage, 'attributes', 'provider') - files = ['relationships', 'files', 'links', 'related', 'href'] - self._files_url = self._get_attribute(storage, *files) + self._files_key = ('relationships', 'files', 'links', 'related', + 'href') + self._files_url = self._get_attribute(storage, *self._files_key) def __str__(self): return '<Storage [{0}]>'.format(self.id) @@ -35,7 +36,6 @@ if kind == 'file': yield File(file) else: - sub_dir_url = ('relationships', 'files', 'links', - 'related', 'href') - url = self._get_attribute(file, *sub_dir_url) + # recurse into a folder and add entries to `files` + url = self._get_attribute(file, *self._files_key) files.extend(self._follow_next(url))
173b4f39433aa27970955173e63f99f58cfeecb1
custom/enikshay/urls.py
custom/enikshay/urls.py
from django.conf.urls import patterns, include urlpatterns = patterns( 'custom.enikshay.integrations.ninetyninedots.views', (r'^99dots/', include("custom.enikshay.integrations.ninetyninedots.urls")), )
from django.conf.urls import patterns, include urlpatterns = patterns( '', (r'^99dots/', include("custom.enikshay.integrations.ninetyninedots.urls")), )
Remove reference to wrong view
Remove reference to wrong view
Python
bsd-3-clause
dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
--- +++ @@ -1,6 +1,6 @@ from django.conf.urls import patterns, include urlpatterns = patterns( - 'custom.enikshay.integrations.ninetyninedots.views', + '', (r'^99dots/', include("custom.enikshay.integrations.ninetyninedots.urls")), )
5bc2ce310cfb13b966b022573255c0042fc03791
application/page/models.py
application/page/models.py
import datetime from sqlalchemy import desc from application import db class Page(db.Model): __tablename__ = 'page' id = db.Column(db.Integer, primary_key=True) path = db.Column(db.String(256), unique=True) revisions = db.relationship('PageRevision', backref='page', lazy='dynamic') def __init__(self, path): self.path = path def get_most_recent(self): revision = PageRevision.query.filter(PageRevision.page_id == self.id) revision = revision.order_by(PageRevision.timestamp.desc()).first() revision.path = self.path return revision class PageRevision(db.Model): __tablename__ = 'page_revision' id = db.Column(db.Integer, primary_key=True) title = db.Column(db.String(128)) content = db.Column(db.Text) timestamp = db.Column(db.DateTime) user_id = db.Column(db.Integer, db.ForeignKey('user.id')) page_id = db.Column(db.Integer, db.ForeignKey('page.id')) def __init__(self, page, author, title, content, timestamp=datetime.datetime.utcnow()): self.title = title self.path = '' self.content = content self.user_id = author.id self.page_id = page.id self.timestamp = timestamp
import datetime from sqlalchemy import desc from application import db class Page(db.Model): __tablename__ = 'page' id = db.Column(db.Integer, primary_key=True) path = db.Column(db.String(256), unique=True) revisions = db.relationship('PageRevision', backref='page', lazy='dynamic') def __init__(self, path): self.path = path def get_most_recent(self): revision = PageRevision.query.filter(PageRevision.page_id == self.id) revision = revision.order_by(PageRevision.timestamp.desc()).first() if revision is not None: revision.path = self.path return revision class PageRevision(db.Model): __tablename__ = 'page_revision' id = db.Column(db.Integer, primary_key=True) title = db.Column(db.String(128)) content = db.Column(db.Text) timestamp = db.Column(db.DateTime) user_id = db.Column(db.Integer, db.ForeignKey('user.id')) page_id = db.Column(db.Integer, db.ForeignKey('page.id')) def __init__(self, page, author, title, content, timestamp=datetime.datetime.utcnow()): self.title = title self.path = '' self.content = content self.user_id = author.id self.page_id = page.id self.timestamp = timestamp
Fix navigition links a bit again
Fix navigition links a bit again
Python
mit
viaict/viaduct,viaict/viaduct,viaict/viaduct,viaict/viaduct,viaict/viaduct
--- +++ @@ -16,7 +16,8 @@ def get_most_recent(self): revision = PageRevision.query.filter(PageRevision.page_id == self.id) revision = revision.order_by(PageRevision.timestamp.desc()).first() - revision.path = self.path + if revision is not None: + revision.path = self.path return revision
0f5fcca49bc22b8a481ba86e9421757ac373a932
bin/example_game_programmatic.py
bin/example_game_programmatic.py
from vengeance.game import Direction from vengeance.game import Game from vengeance.game import Location go_up = Direction('up') go_down = Direction('down') go_up.opposite = go_down go_in = Direction('in') go_out = Direction('out') go_in.opposite = go_out go_west = Direction('west') go_east = Direction('east') go_west.opposite = go_east church = Location('A Church', 'Tiny place of worship') crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi') coffin = Location('A Coffin', 'A tight squeeze and pitch dark') cave = Location('A Cave') church.add_exit(go_down, crypt) crypt.add_one_way_exit(go_in, coffin) crypt.add_exit(go_west, cave) game = Game([church, crypt, coffin, cave]) # Move the player down from the church to the crypt game.process_input('d') game.run()
from vengeance.game import Direction from vengeance.game import Game from vengeance.game import Location go_up = Direction('up') go_down = Direction('down') go_up.opposite = go_down go_in = Direction('in') go_out = Direction('out') go_in.opposite = go_out go_west = Direction('west') go_east = Direction('east') go_west.opposite = go_east church = Location('A Church', 'Tiny place of worship') crypt = Location('The Crypt', 'Dusty tomb filled with empty sarcophagi') coffin = Location('A Coffin', 'A tight squeeze and pitch dark') cave = Location('A Cave') church.add_exit(go_down, crypt) crypt.add_one_way_exit(go_in, coffin) crypt.add_exit(go_west, cave) game = Game([church, crypt, coffin, cave]) # Move the player down from the church to the crypt print("Current location: " + game.character.current_location.name) game.process_input('d') print("Current location: " + game.character.current_location.name) game.run()
Add use of Game.character.current_location to example
Add use of Game.character.current_location to example
Python
unlicense
mmurdoch/Vengeance,mmurdoch/Vengeance
--- +++ @@ -26,6 +26,8 @@ game = Game([church, crypt, coffin, cave]) # Move the player down from the church to the crypt +print("Current location: " + game.character.current_location.name) game.process_input('d') +print("Current location: " + game.character.current_location.name) game.run()
ceebd0b345fe7221577bfcfe18632267897871e8
test/helpers/xnat_test_helper.py
test/helpers/xnat_test_helper.py
import os, re from base64 import b64encode as encode from qipipe.staging import airc_collection as airc from qipipe.staging.staging_helper import SUBJECT_FMT from qipipe.helpers import xnat_helper import logging logger = logging.getLogger(__name__) def generate_subject_name(name): """ Makes a subject name that is unique to the given test name. @param name: the test name @return: the test subject name """ return 'Test_' + encode(name).strip('=') def get_subjects(collection, source, pattern=None): """ Infers the XNAT subject names from the given source directory. The source directory contains subject subdirectories. The match pattern matches on the subdirectories and captures the subject number. The subject name is the collection name followed by the subject number, e.g. C{Breast004}. @param collection: the AIRC collection name @param source: the input parent directory @param pattern: the subject directory name match pattern (default L{airc.AIRCCollection.subject_pattern}) @return: the subject name => directory dictionary """ airc_coll = airc.collection_with_name(collection) pat = pattern or airc_coll.subject_pattern sbj_dir_dict = {} with xnat_helper.connection() as xnat: for d in os.listdir(source): match = re.match(pat, d) if match: # The XNAT subject name. subject = SUBJECT_FMT % (collection, int(match.group(1))) # The subject source directory. sbj_dir_dict[subject] = os.path.join(source, d) logger.debug("Discovered XNAT test subject %s subdirectory: %s" % (subject, d)) return sbj_dir_dict def delete_subjects(*subject_names): """ Deletes the given XNAT subjects, if they exist. @param subject_names: the XNAT subject names """ with xnat_helper.connection() as xnat: for sbj_lbl in subject_names: sbj = xnat.get_subject('QIN', sbj_lbl) if sbj.exists(): sbj.delete() logger.debug("Deleted the XNAT test subject %s." % sbj_lbl)
import os, re from base64 import b64encode as encode from qipipe.staging import airc_collection as airc from qipipe.staging.staging_helper import SUBJECT_FMT from qipipe.helpers import xnat_helper import logging logger = logging.getLogger(__name__) def generate_subject_name(name): """ Makes a subject name that is unique to the given test name. @param name: the test name @return: the test subject name """ return 'Test_' + encode(name).strip('=')
Move get_subjects and delete_subjects to qipipe helpers.
Move get_subjects and delete_subjects to qipipe helpers.
Python
bsd-2-clause
ohsu-qin/qipipe
--- +++ @@ -15,45 +15,3 @@ @return: the test subject name """ return 'Test_' + encode(name).strip('=') - -def get_subjects(collection, source, pattern=None): - """ - Infers the XNAT subject names from the given source directory. - The source directory contains subject subdirectories. - The match pattern matches on the subdirectories and captures the - subject number. The subject name is the collection name followed - by the subject number, e.g. C{Breast004}. - - @param collection: the AIRC collection name - @param source: the input parent directory - @param pattern: the subject directory name match pattern - (default L{airc.AIRCCollection.subject_pattern}) - @return: the subject name => directory dictionary - """ - airc_coll = airc.collection_with_name(collection) - pat = pattern or airc_coll.subject_pattern - sbj_dir_dict = {} - with xnat_helper.connection() as xnat: - for d in os.listdir(source): - match = re.match(pat, d) - if match: - # The XNAT subject name. - subject = SUBJECT_FMT % (collection, int(match.group(1))) - # The subject source directory. - sbj_dir_dict[subject] = os.path.join(source, d) - logger.debug("Discovered XNAT test subject %s subdirectory: %s" % (subject, d)) - - return sbj_dir_dict - -def delete_subjects(*subject_names): - """ - Deletes the given XNAT subjects, if they exist. - - @param subject_names: the XNAT subject names - """ - with xnat_helper.connection() as xnat: - for sbj_lbl in subject_names: - sbj = xnat.get_subject('QIN', sbj_lbl) - if sbj.exists(): - sbj.delete() - logger.debug("Deleted the XNAT test subject %s." % sbj_lbl)
518a572c4979d98fda60a4b736984fe3673ecc0a
courses.py
courses.py
from glob import glob course_map = {'course_folder_name' : 'Full Course Name'} def update_lectures(): course_lectures = dict() for course_id in course_map: vid_files = sorted(glob('static/courses/%s/*.mp4' % course_id)) lectures = dict((n+1, str(x)) for n,x in enumerate(vid_files)) course_lectures[course_id] = lectures return course_lectures
from glob import glob course_map = {'course_folder_name' : 'Full Course Name'} def update_lectures(): course_lectures = dict() for course_id in course_map: vid_files = sorted(glob('static/courses/%s/*.mp4' % course_id)) lectures = dict((n+1, str(x)) for n,x in enumerate(vid_files)) course_lectures[course_id] = lectures return course_lectures
Make update_lectures return outside the loop
Make update_lectures return outside the loop
Python
mit
jailuthra/webedu
--- +++ @@ -8,4 +8,4 @@ vid_files = sorted(glob('static/courses/%s/*.mp4' % course_id)) lectures = dict((n+1, str(x)) for n,x in enumerate(vid_files)) course_lectures[course_id] = lectures - return course_lectures + return course_lectures
4d7ffb1b09c861a28da3acaae94ee84cb9ee85b7
nap/api.py
nap/api.py
# TODO: Add other patterns to allow introspection? class Api(object): '''Helper class for registering many Publishers in one URL namespace''' def __init__(self, name): self.name = name self.children = {} def patterns(self): urlpatterns = [] for child in self.children: urlpatterns.extend(child.patterns()) return [ (r'^%s/' % self.name, include([ (r'^%s/' % name, include(child.patterns())) for name, child in self.children.items() ])) ] def register(self, child, name=None): if name is None: name = getattr(child, 'api_name', child.__class__.__name__.lower()) if name in self.children: raise Warning('Publisher with name %s already registered: %r -> %r' % ( name, self.children[name], child )) self.children[name] = child
from django.conf.urls import url, include # TODO: Add other patterns to allow introspection? class Api(object): '''Helper class for registering many Publishers in one URL namespace''' def __init__(self, name): self.name = name self.children = {} def patterns(self, flat=False): urlpatterns = [ url(r'^%s/' % name, include(child.patterns())) for name, child in self.children.items() ] if flat: return urlpatterns return [ url(r'^%s/' % self.name, include(urlpatterns)), ] def register(self, child, name=None): if name is None: name = getattr(child, 'api_name', child.__class__.__name__.lower()) if name in self.children: raise Warning('Publisher with name %s already registered: %r -> %r' % ( name, self.children[name], child )) self.children[name] = child APIS = {} def register(name, *args): try: api = APIS[name] except KeyError: api = APIS[name] = Api(name=name) for resource in args: api.register(resource) return api def autodiscover(): from django.conf import settings from django.utils.importlib import import_module from django.utils.module_loading import module_has_submodule for app in settings.INSTALLED_APPS: mod = import_module(app) # Attempt to import the app's api module. try: import_module('%s.serialiser' % app) except: # Decide whether to bubble up this error. If the app just # doesn't have an admin module, we can ignore the error # attempting to import it, otherwise we want it to bubble up. if module_has_submodule(mod, 'serialiser'): raise def patterns(flat=False): patterns = [] for api in APIS.values(): patterns.extend(api.patterns(flat=flat)) return patterns
Add flat patterns for Api Add register/autodiscover system for Api
Add flat patterns for Api Add register/autodiscover system for Api
Python
bsd-3-clause
limbera/django-nap,MarkusH/django-nap
--- +++ @@ -1,4 +1,5 @@ +from django.conf.urls import url, include # TODO: Add other patterns to allow introspection? @@ -8,15 +9,15 @@ self.name = name self.children = {} - def patterns(self): - urlpatterns = [] - for child in self.children: - urlpatterns.extend(child.patterns()) + def patterns(self, flat=False): + urlpatterns = [ + url(r'^%s/' % name, include(child.patterns())) + for name, child in self.children.items() + ] + if flat: + return urlpatterns return [ - (r'^%s/' % self.name, include([ - (r'^%s/' % name, include(child.patterns())) - for name, child in self.children.items() - ])) + url(r'^%s/' % self.name, include(urlpatterns)), ] def register(self, child, name=None): @@ -28,3 +29,38 @@ )) self.children[name] = child +APIS = {} + +def register(name, *args): + try: + api = APIS[name] + except KeyError: + api = APIS[name] = Api(name=name) + for resource in args: + api.register(resource) + return api + +def autodiscover(): + from django.conf import settings + from django.utils.importlib import import_module + from django.utils.module_loading import module_has_submodule + + for app in settings.INSTALLED_APPS: + mod = import_module(app) + # Attempt to import the app's api module. + try: + import_module('%s.serialiser' % app) + except: + + # Decide whether to bubble up this error. If the app just + # doesn't have an admin module, we can ignore the error + # attempting to import it, otherwise we want it to bubble up. + if module_has_submodule(mod, 'serialiser'): + raise + +def patterns(flat=False): + patterns = [] + for api in APIS.values(): + patterns.extend(api.patterns(flat=flat)) + return patterns +
457cbeaa66fa504442c1303bec4df25e83ee35c3
froide/document/models.py
froide/document/models.py
from django.db import models from filingcabinet.models import ( AbstractDocument, AbstractDocumentCollection, ) class Document(AbstractDocument): original = models.ForeignKey( 'foirequest.FoiAttachment', null=True, blank=True, on_delete=models.SET_NULL, related_name='original_document' ) foirequest = models.ForeignKey( 'foirequest.FoiRequest', null=True, blank=True, on_delete=models.SET_NULL ) publicbody = models.ForeignKey( 'publicbody.PublicBody', null=True, blank=True, on_delete=models.SET_NULL ) def is_public(self): return self.public class DocumentCollection(AbstractDocumentCollection): pass
from django.db import models from filingcabinet.models import ( AbstractDocument, AbstractDocumentCollection, ) class Document(AbstractDocument): original = models.ForeignKey( 'foirequest.FoiAttachment', null=True, blank=True, on_delete=models.SET_NULL, related_name='original_document' ) foirequest = models.ForeignKey( 'foirequest.FoiRequest', null=True, blank=True, on_delete=models.SET_NULL ) publicbody = models.ForeignKey( 'publicbody.PublicBody', null=True, blank=True, on_delete=models.SET_NULL ) def is_public(self): return self.public def get_serializer_class(self, detail=False): from .api_views import DocumentSerializer, DocumentDetailSerializer if detail: return DocumentDetailSerializer return DocumentSerializer class DocumentCollection(AbstractDocumentCollection): pass
Add get_serializer_class to document model
Add get_serializer_class to document model
Python
mit
fin/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide
--- +++ @@ -24,6 +24,13 @@ def is_public(self): return self.public + def get_serializer_class(self, detail=False): + from .api_views import DocumentSerializer, DocumentDetailSerializer + + if detail: + return DocumentDetailSerializer + return DocumentSerializer + class DocumentCollection(AbstractDocumentCollection): pass
da0dc08d8fdd18a64ecc883404553c86de6a726c
test/functional/feature_shutdown.py
test/functional/feature_shutdown.py
#!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy from threading import Thread def test_long_call(node): block = node.waitfornewblock() assert_equal(block['height'], 0) class ShutdownTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) Thread(target=test_long_call, args=(node,)).start() # wait 1 second to ensure event loop waits for current connections to close self.stop_node(0, wait=1000) if __name__ == '__main__': ShutdownTest().main()
#!/usr/bin/env python3 # Copyright (c) 2018 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. """Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework from test_framework.util import assert_equal, get_rpc_proxy, wait_until from threading import Thread def test_long_call(node): block = node.waitfornewblock() assert_equal(block['height'], 0) class ShutdownTest(BitcoinTestFramework): def set_test_params(self): self.setup_clean_chain = True self.num_nodes = 1 def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) # Force connection establishment by executing a dummy command. node.getblockcount() Thread(target=test_long_call, args=(node,)).start() # Wait until the server is executing the above `waitfornewblock`. wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2) # Wait 1 second after requesting shutdown but not before the `stop` call # finishes. This is to ensure event loop waits for current connections # to close. self.stop_node(0, wait=1000) if __name__ == '__main__': ShutdownTest().main()
Remove race between connecting and shutdown on separate connections
qa: Remove race between connecting and shutdown on separate connections
Python
mit
DigitalPandacoin/pandacoin,DigitalPandacoin/pandacoin,peercoin/peercoin,peercoin/peercoin,DigitalPandacoin/pandacoin,peercoin/peercoin,peercoin/peercoin,DigitalPandacoin/pandacoin,DigitalPandacoin/pandacoin,peercoin/peercoin,DigitalPandacoin/pandacoin,peercoin/peercoin
--- +++ @@ -5,7 +5,7 @@ """Test bitcoind shutdown.""" from test_framework.test_framework import BitcoinTestFramework -from test_framework.util import assert_equal, get_rpc_proxy +from test_framework.util import assert_equal, get_rpc_proxy, wait_until from threading import Thread def test_long_call(node): @@ -20,8 +20,14 @@ def run_test(self): node = get_rpc_proxy(self.nodes[0].url, 1, timeout=600, coveragedir=self.nodes[0].coverage_dir) + # Force connection establishment by executing a dummy command. + node.getblockcount() Thread(target=test_long_call, args=(node,)).start() - # wait 1 second to ensure event loop waits for current connections to close + # Wait until the server is executing the above `waitfornewblock`. + wait_until(lambda: len(self.nodes[0].getrpcinfo()['active_commands']) == 2) + # Wait 1 second after requesting shutdown but not before the `stop` call + # finishes. This is to ensure event loop waits for current connections + # to close. self.stop_node(0, wait=1000) if __name__ == '__main__':
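The fix above removes the race by forcing the RPC connection open with a dummy getblockcount call and then polling getrpcinfo through the framework's wait_until helper before requesting shutdown. As a rough illustration of what such a polling helper does (a generic sketch built on a simple timeout loop, not the test framework's actual implementation):

import time


def wait_until(predicate, timeout=60, poll_interval=0.5):
    """Poll predicate until it returns True, or fail after timeout seconds."""
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        if predicate():
            return
        time.sleep(poll_interval)
    raise AssertionError("predicate not satisfied within %s seconds" % timeout)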
8fb421831bb562a80edf5c3de84d71bf2a3eec4b
tools/scrub_database.py
tools/scrub_database.py
import os import sys import django sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings") django.setup() from museum_site.models import * # noqa: E402 from museum_site.constants import REMOVED_ARTICLE, DETAIL_REMOVED # noqa: E402 def main(): print("WARNING! THIS WILL PERMANENTLY REMOVE DATA FROM THIS DATABASE") print("Are you sure you wish to remove all non-public data?") confirm = input("Type 'yes' to confirm: ") if confirm == "yes": print("Deleting articles...") for a in Article.objects.filter(published=REMOVED_ARTICLE): print(a) a.delete() print("Done!") print("Deleting file objects...") for f in File.objects.filter(details__id=DETAIL_REMOVED): print(f) f.delete() print("Done!") print("Private data has removed. Database can be publicly shared.") print("DONE.") else: print("ABORTED.") if __name__ == '__main__': main()
import datetime import os import sys import django sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings") django.setup() from django.contrib.sessions.models import Session from django.contrib.auth.models import User from museum_site.models import * # noqa: E402 from museum_site.constants import REMOVED_ARTICLE, DETAIL_REMOVED # noqa: E402 def main(): print("WARNING! THIS WILL PERMANENTLY REMOVE DATA FROM THIS DATABASE") print("Are you sure you wish to remove all non-public data?") confirm = input("Type 'yes' to confirm: ") if confirm == "yes": print("Deleting articles...") for a in Article.objects.filter(published=REMOVED_ARTICLE): print(a) a.delete() print("Done!") print("Deleting file objects...") for f in File.objects.filter(details__id=DETAIL_REMOVED): print(f) f.delete() print("Done!") print("Deleting sessions...") Session.objects.all().delete() print("Done!") print("Clearing accounts...") qs = User.objects.all() for u in qs: u.username = "USER #" + str(u.id) u.first_name = "" u.last_name= "" u.email = "test@example.com" u.password = u.set_password("password") u.is_staff = False u.is_superuser = False u.save() print("Private data has removed. Database can be publicly shared.") print("DONE.") else: print("ABORTED.") if __name__ == '__main__': main()
Remove sessions when scrubbing DB for public release
Remove sessions when scrubbing DB for public release
Python
mit
DrDos0016/z2,DrDos0016/z2,DrDos0016/z2
--- +++ @@ -1,3 +1,5 @@ +import datetime + import os import sys @@ -6,6 +8,9 @@ sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings") django.setup() + +from django.contrib.sessions.models import Session +from django.contrib.auth.models import User from museum_site.models import * # noqa: E402 from museum_site.constants import REMOVED_ARTICLE, DETAIL_REMOVED # noqa: E402 @@ -27,6 +32,24 @@ print(f) f.delete() print("Done!") + print("Deleting sessions...") + Session.objects.all().delete() + print("Done!") + print("Clearing accounts...") + qs = User.objects.all() + for u in qs: + u.username = "USER #" + str(u.id) + u.first_name = "" + u.last_name= "" + u.email = "test@example.com" + u.password = u.set_password("password") + u.is_staff = False + u.is_superuser = False + u.save() + + + + print("Private data has removed. Database can be publicly shared.") print("DONE.") else:
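One note for anyone reusing the account-clearing loop above: in Django, User.set_password() hashes the raw password and assigns the hash to the instance in place, returning None, so its return value does not need to be assigned back to the password field. A minimal sketch of the usual idiom, assuming a configured Django project:

from django.contrib.auth.models import User

for user in User.objects.all():
    user.set_password("password")  # hashes and stores the hash on the instance
    user.save()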
058d9a5c9396522d60cf595820cf94a67b42c475
bigcommerce/resources/webhooks.py
bigcommerce/resources/webhooks.py
from .base import * class Webhooks(ListableApiResource, CreateableApiResource, UpdateableApiResource, DeleteableApiResource): resource_name = 'webhooks'
from .base import * class Webhooks(ListableApiResource, CreateableApiResource, UpdateableApiResource, DeleteableApiResource): resource_name = 'hooks'
Fix typo in resource name
Fix typo in resource name
Python
mit
hockeybuggy/bigcommerce-api-python,bigcommerce/bigcommerce-api-python
--- +++ @@ -3,4 +3,4 @@ class Webhooks(ListableApiResource, CreateableApiResource, UpdateableApiResource, DeleteableApiResource): - resource_name = 'webhooks' + resource_name = 'hooks'
8bc4a4a5c6ef82b43f78ac9bcd1ce7e2888e2e4b
backend/messages.py
backend/messages.py
# -*- coding: utf-8 -*- import json from enum import Enum class BEMessages(Enum): ALL_MAIN_BROADCAST = 'ALL_MAIN_BROADCAST' class FEMessages(Enum): pass class AllMainBroadCast(object): message_type = BEMessages.ALL_MAIN_BROADCAST def __init__(self): pass def broadcast(self): data = { 'type': self.message_type, 'content': '', # TODO: grab message data from class vars } # TODO: actually broadcast message json_content = json.dumps(data, ensure_ascii=False) print json_content
# -*- coding: utf-8 -*- import json from enum import Enum class BEMessages(Enum): ALL_MAIN_BROADCAST = 'ALL_MAIN_BROADCAST' class FEMessages(Enum): pass class AllMainBroadCast(object): message_type = BEMessages.ALL_MAIN_BROADCAST def __init__(self): pass def broadcast(self, handler): data = { 'type': self.message_type, 'content': 'TEST', # TODO: grab message data from class vars } json_content = json.dumps(data, ensure_ascii=False) handler.send(json_content)
Add handler send logic to message
Add handler send logic to message
Python
mit
verekia/hackarena,verekia/hackarena,verekia/hackarena,verekia/hackarena
--- +++ @@ -18,12 +18,11 @@ def __init__(self): pass - def broadcast(self): + def broadcast(self, handler): data = { 'type': self.message_type, - 'content': '', # TODO: grab message data from class vars + 'content': 'TEST', # TODO: grab message data from class vars } - # TODO: actually broadcast message json_content = json.dumps(data, ensure_ascii=False) - print json_content + handler.send(json_content)
197d2b1282d9f4c94535f6627ff151752bd8f063
c3po/provider/groupme/receive.py
c3po/provider/groupme/receive.py
"""Handles message receiving for GroupMe provider.""" import logging import json import time import flask from c3po.provider.groupme import send APP = flask.Flask(__name__) APP.config['DEBUG'] = True SUCCESS = ('', 200) @APP.route('/groupme/<bot_id>', methods=['POST']) def receive_message(bot_id): """Processes a message and returns a response.""" time.sleep(.1) logging.info("Request data: %s", flask.request.data) msg_data = json.loads(flask.request.data) group_id = msg_data['group_id'] name = msg_data['name'] text = msg_data['text'] time_sent = float(msg_data['created_at']) picture_url = None attachments = msg_data['attachments'] if attachments: if attachments[0]['type'] == 'image': picture_url = attachments[0]['url'] if not bot_id or not group_id or not name or (not text and not picture_url): flask.abort(400) logging.info("Group ID: %s", group_id) logging.info("Name: %s", name) logging.info("Text: %s", text) msg = send.GroupmeMessage(bot_id, name, picture_url, text, time_sent) if name == msg.settings.bot_name: logging.info("Ignoring request since it's coming from the bot.") return SUCCESS msg.process_message() return SUCCESS
"""Handles message receiving for GroupMe provider.""" import logging import json import time import flask from c3po.provider.groupme import send APP = flask.Flask(__name__) APP.config['DEBUG'] = True SUCCESS = ('', 200) @APP.route('/groupme/<bot_id>', methods=['POST']) def receive_message(bot_id): """Processes a message and returns a response.""" logging.info("Request data: %s", flask.request.data) msg_data = json.loads(flask.request.data) group_id = msg_data['group_id'] name = msg_data['name'] text = msg_data['text'] time_sent = float(msg_data['created_at']) picture_url = None attachments = msg_data['attachments'] if attachments: if attachments[0]['type'] == 'image': picture_url = attachments[0]['url'] if not bot_id or not group_id or not name or (not text and not picture_url): flask.abort(400) logging.info("Group ID: %s", group_id) logging.info("Name: %s", name) logging.info("Text: %s", text) msg = send.GroupmeMessage(bot_id, name, picture_url, text, time_sent) if name == msg.settings.bot_name: logging.info("Ignoring request since it's coming from the bot.") return SUCCESS msg.process_message() return SUCCESS
Remove delay when responding to messages
Remove delay when responding to messages Not needed anymore. Fixes #123
Python
apache-2.0
rhefner1/c3po,rhefner1/c3po
--- +++ @@ -17,7 +17,6 @@ @APP.route('/groupme/<bot_id>', methods=['POST']) def receive_message(bot_id): """Processes a message and returns a response.""" - time.sleep(.1) logging.info("Request data: %s", flask.request.data)
3aabe40ba9d65f730763a604d1869c3114886273
odin/compatibility.py
odin/compatibility.py
""" This module is to include utils for managing compatibility between Python and Odin releases. """ import inspect import warnings def deprecated(message, category=DeprecationWarning): """ Decorator for marking classes/functions as being deprecated and are to be removed in the future. :param message: Message provided. :param category: Category of warning, defaults to DeprecationWarning """ def wrap(obj): if inspect.isclass(obj): old_init = obj.__init__ def wrapped_init(*args, **kwargs): warnings.warn( "{0} is deprecated and scheduled for removal. {1}".format(obj.__name__, message), category=category ) return old_init(*args, **kwargs) obj.__init__ = wrapped_init return obj else: def wrapped_func(*args): warnings.warn( "{0} is deprecated and scheduled for removal. {1}".format(obj.__name__, message), category=category ) return obj(*args) return wrapped_func return wrap
""" This module is to include utils for managing compatibility between Python and Odin releases. """ import inspect import warnings def deprecated(message, category=DeprecationWarning): """ Decorator for marking classes/functions as being deprecated and are to be removed in the future. :param message: Message provided. :param category: Category of warning, defaults to DeprecationWarning """ def wrap(obj): if inspect.isclass(obj): old_init = obj.__init__ def wrapped_init(*args, **kwargs): warnings.warn( "{0} is deprecated and scheduled for removal. {1}".format(obj.__name__, message), category=category ) return old_init(*args, **kwargs) obj.__init__ = wrapped_init return obj else: def wrapped_func(*args, **kwargs): warnings.warn( "{0} is deprecated and scheduled for removal. {1}".format(obj.__name__, message), category=category ) return obj(*args, **kwargs) return wrapped_func return wrap
Support kwargs along with args for functions
Support kwargs along with args for functions
Python
bsd-3-clause
python-odin/odin
--- +++ @@ -28,12 +28,12 @@ return obj else: - def wrapped_func(*args): + def wrapped_func(*args, **kwargs): warnings.warn( "{0} is deprecated and scheduled for removal. {1}".format(obj.__name__, message), category=category ) - return obj(*args) + return obj(*args, **kwargs) return wrapped_func return wrap
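Because the point of the change above is keyword-argument support in the function branch, a short usage sketch of the decorator may help (the function and argument names are illustrative only, and assume the deprecated decorator defined above is in scope):

@deprecated("Use combine_v2() instead.")
def combine(a, b, sep=", "):
    return "%s%s%s" % (a, sep, b)


# Before the change the keyword argument raised TypeError inside wrapped_func;
# now it is forwarded, and the call still emits a DeprecationWarning.
result = combine("spam", "eggs", sep=" & ")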
39104d9b098a32ee6aa68eba9cb8d12127d3eb74
direlog.py
direlog.py
#!/usr/bin/env python # encoding: utf-8 import sys import re import argparse from patterns import pre_patterns def prepare(infile): """ Apply pre_patterns from patterns to infile :infile: input file """ try: for line in infile: result = line for pattern in pre_patterns: result = re.sub(pattern[0], pattern[1], result) sys.stdout.write(result) except (KeyboardInterrupt): pass def main(): parser = argparse.ArgumentParser(description='Parse file[s]') parser.add_argument('file', nargs='*', default=[], help='file[s] to do some work') parser.add_argument('-s', '--stat', action='store_const', const=True, help='get statistics') args = parser.parse_args() if not args.file: prepare(sys.stdin) else: for filename in args.file: with open(filename, 'r') as f: prepare(f) # if outfilename: # outfile = open(outfilename, 'w') # else: # outfile = sys.stdout pass if __name__ == '__main__': main()
#!/usr/bin/env python # encoding: utf-8 import sys import re import argparse from argparse import RawDescriptionHelpFormatter from patterns import pre_patterns def prepare(infile, outfile=sys.stdout): """ Apply pre_patterns from patterns to infile :infile: input file """ try: for line in infile: result = line for pattern in pre_patterns: result = re.sub(pattern[0], pattern[1], result, re.VERBOSE) outfile.write(result) except (KeyboardInterrupt): pass except: raise def main(): parser = argparse.ArgumentParser(description=\ """ Parse file[s]\n\n examlpe: cat error_log | tail -n 1000 | ./direlog.py """, formatter_class=RawDescriptionHelpFormatter) parser.add_argument('file', nargs='*', default=[], help='file[s] to do some work') parser.add_argument('-s', '--stat', action='store_const', const=True, help='get statistics') args = parser.parse_args() if not args.file: prepare(sys.stdin) else: for filename in args.file: with open(filename, 'r') as f: prepare(f) pass if __name__ == '__main__': main()
Add some info and outfile to prepare function
Add some info and outfile to prepare function
Python
mit
abcdw/direlog,abcdw/direlog
--- +++ @@ -3,11 +3,12 @@ import sys import re import argparse +from argparse import RawDescriptionHelpFormatter from patterns import pre_patterns -def prepare(infile): +def prepare(infile, outfile=sys.stdout): """ Apply pre_patterns from patterns to infile @@ -19,14 +20,20 @@ for line in infile: result = line for pattern in pre_patterns: - result = re.sub(pattern[0], pattern[1], result) - sys.stdout.write(result) + result = re.sub(pattern[0], pattern[1], result, re.VERBOSE) + outfile.write(result) except (KeyboardInterrupt): pass + except: + raise def main(): - parser = argparse.ArgumentParser(description='Parse file[s]') + parser = argparse.ArgumentParser(description=\ + """ + Parse file[s]\n\n + examlpe: cat error_log | tail -n 1000 | ./direlog.py + """, formatter_class=RawDescriptionHelpFormatter) parser.add_argument('file', nargs='*', default=[], help='file[s] to do some work') parser.add_argument('-s', '--stat', action='store_const', const=True, @@ -40,11 +47,6 @@ with open(filename, 'r') as f: prepare(f) - # if outfilename: - # outfile = open(outfilename, 'w') - # else: - # outfile = sys.stdout - pass if __name__ == '__main__':
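A caveat worth flagging for anyone reusing the substitution loop above: in the standard library the signature is re.sub(pattern, repl, string, count=0, flags=0), so a flag passed as the fourth positional argument is interpreted as a count rather than as a flag. Passing it by keyword avoids the ambiguity, for example:

import re

text = "alpha   beta"
# flags must be given by keyword; the fourth positional argument is count.
cleaned = re.sub(r"\s+  # collapse runs of whitespace", " ", text, flags=re.VERBOSE)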
27acd078d04222e345a7939d5f74c6d43069832e
fabfile.py
fabfile.py
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/freemusic.ninja/django" def deploy(): with cd(env.directory): run("git pull --rebase") run("pip3 install -r requirements.txt") run("python3 manage.py collectstatic --noinput") run("python3 manage.py migrate") run("service gunicorn restart")
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/freemusic.ninja/django" def deploy(): with cd(env.directory): run("git pull --rebase") sudo("pip3 install -r requirements.txt", user='django') sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
Add more fabric commands and fix deploy command
Add more fabric commands and fix deploy command
Python
bsd-3-clause
FreeMusicNinja/freemusic.ninja,FreeMusicNinja/freemusic.ninja
--- +++ @@ -11,7 +11,26 @@ def deploy(): with cd(env.directory): run("git pull --rebase") - run("pip3 install -r requirements.txt") - run("python3 manage.py collectstatic --noinput") - run("python3 manage.py migrate") + sudo("pip3 install -r requirements.txt", user='django') + sudo("python3 manage.py collectstatic --noinput", user='django') + sudo("python3 manage.py migrate --noinput", user='django') run("service gunicorn restart") + + +def dbshell(): + with cd(env.directory): + sudo("python3 manage.py dbshell", user='django') + + +def shell(): + with cd(env.directory): + sudo("python3 manage.py shell", user='django') + + +def migrate(): + with cd(env.directory): + sudo("python3 manage.py migrate", user='django') + + +def gunicorn_restart(): + run("service gunicorn restart")
5d5f8e02efa6854bef0813e0e8383a3760cf93d2
os_brick/privileged/__init__.py
os_brick/privileged/__init__.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_privsep import capabilities as c from oslo_privsep import priv_context # It is expected that most (if not all) os-brick operations can be # executed with these privileges. default = priv_context.PrivContext( __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', capabilities=[c.CAP_SYS_ADMIN], )
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import os from oslo_privsep import capabilities as c from oslo_privsep import priv_context capabilities = [c.CAP_SYS_ADMIN] # On virtual environments libraries are not owned by the Daemon user (root), so # the Daemon needs the capability to bypass file read permission checks in # order to dynamically load the code to run. if os.environ.get('VIRTUAL_ENV'): capabilities.append(c.CAP_DAC_READ_SEARCH) # It is expected that most (if not all) os-brick operations can be # executed with these privileges. default = priv_context.PrivContext( __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', capabilities=capabilities, )
Fix os-brick in virtual environments
Fix os-brick in virtual environments When running os-brick in a virtual environment created by a non root user, we get the following error: ModuleNotFoundError: No module named 'os_brick.privileged.rootwrap' This happens because the privsep daemon drops all the privileged except those defined in the context, and our current context doesn't bypass file read permission checks, so the Daemon cannot read the file with the code it was asked to run, because it belongs to a different user. This patch adds the CAP_DAC_READ_SEARCH capability to our privsep context so we can load the libraries, but only when we are running on a virtual environment to follow the principle of least privilege. This bug doesn't affect system-wide installations because the files installed under /sys/python*/site-packages belong to the Daemon user (root), so no special capabilities are necessary. Change-Id: Ib191c075ad1250822f6ac842f39214af8f3a02f0 Close-Bug: #1884059
Python
apache-2.0
openstack/os-brick,openstack/os-brick
--- +++ @@ -10,8 +10,19 @@ # License for the specific language governing permissions and limitations # under the License. +import os + from oslo_privsep import capabilities as c from oslo_privsep import priv_context + + +capabilities = [c.CAP_SYS_ADMIN] + +# On virtual environments libraries are not owned by the Daemon user (root), so +# the Daemon needs the capability to bypass file read permission checks in +# order to dynamically load the code to run. +if os.environ.get('VIRTUAL_ENV'): + capabilities.append(c.CAP_DAC_READ_SEARCH) # It is expected that most (if not all) os-brick operations can be # executed with these privileges. @@ -19,5 +30,5 @@ __name__, cfg_section='privsep_osbrick', pypath=__name__ + '.default', - capabilities=[c.CAP_SYS_ADMIN], + capabilities=capabilities, )
565ed49b29f09acf4fa79ba395a31b88792e91ce
setup.py
setup.py
#! /usr/bin/env python """Setup information of demandlib. """ from setuptools import setup, find_packages import os def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup(name='demandlib', version='0.1.5dev', author='oemof developer group', url='https://oemof.org/', license='GPL3', author_email='oemof@rl-institut.de', description='Demandlib of the open energy modelling framework', long_description=read('README.rst'), packages=find_packages(), install_requires=['numpy >= 1.7.0, <= 1.14.3', 'pandas >= 0.18.0, <= 0.23'], package_data={ 'demandlib': [os.path.join('bdew_data', '*.csv')], 'demandlib.examples': ['*.csv']}, )
#! /usr/bin/env python """Setup information of demandlib. """ from setuptools import setup, find_packages import os def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() setup(name='demandlib', version='0.1.5dev', author='oemof developer group', url='https://oemof.org/', license='GPL3', author_email='oemof@rl-institut.de', description='Demandlib of the open energy modelling framework', long_description=read('README.rst'), packages=find_packages(), install_requires=['numpy >= 1.7.0, <= 1.15', 'pandas >= 0.18.0, <= 0.23.4'], package_data={ 'demandlib': [os.path.join('bdew_data', '*.csv')], 'demandlib.examples': ['*.csv']}, )
Allow newest versions of numpy and pandas
Allow newest versions of numpy and pandas
Python
mit
oemof/demandlib
--- +++ @@ -20,8 +20,8 @@ description='Demandlib of the open energy modelling framework', long_description=read('README.rst'), packages=find_packages(), - install_requires=['numpy >= 1.7.0, <= 1.14.3', - 'pandas >= 0.18.0, <= 0.23'], + install_requires=['numpy >= 1.7.0, <= 1.15', + 'pandas >= 0.18.0, <= 0.23.4'], package_data={ 'demandlib': [os.path.join('bdew_data', '*.csv')], 'demandlib.examples': ['*.csv']},
5aa48facaf77d8fb6919c960659dfa41f3f1ad78
fabfile.py
fabfile.py
import os from fabric.api import * def unit(): current_dir = os.path.dirname(__file__) command = " ".join(["PYTHONPATH=$PYTHONPATH:%s/videolog" % current_dir, "nosetests", "-s", "--verbose", "--with-coverage", "--cover-package=videolog", "tests/unit/*"]) local(command)
import os from fabric.api import * def clean(): current_dir = os.path.dirname(__file__) local("find %s -name '*.pyc' -exec rm -f {} \;" % current_dir) local("rm -rf %s/build" % current_dir) def unit(): clean() current_dir = os.path.dirname(__file__) command = " ".join(["PYTHONPATH=$PYTHONPATH:%s/videolog" % current_dir, "nosetests", "-s", "--verbose", "--with-coverage", "--cover-package=videolog", "tests/unit/*"]) local(command)
Add task clean() to remove *.pyc files
Add task clean() to remove *.pyc files
Python
mit
rcmachado/pyvideolog
--- +++ @@ -2,7 +2,13 @@ from fabric.api import * +def clean(): + current_dir = os.path.dirname(__file__) + local("find %s -name '*.pyc' -exec rm -f {} \;" % current_dir) + local("rm -rf %s/build" % current_dir) + def unit(): + clean() current_dir = os.path.dirname(__file__) command = " ".join(["PYTHONPATH=$PYTHONPATH:%s/videolog" % current_dir, "nosetests", "-s", "--verbose", "--with-coverage",
02090062a61e96fa6490181acaea1b8820109b98
hooks/post_gen_project.py
hooks/post_gen_project.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger('post_gen_project') import shutil import os {% if cookiecutter.docs_tool == "mkdocs" %} logger.info('Moving files for mkdocs.') os.rename('mkdocs/mkdocs.yml', 'mkdocs.yml') shutil.move('mkdocs', 'docs') shutil.rmtree('sphinxdocs') {% elif cookiecutter.docs_tool == "sphinx" %} logger.info('Moving files for sphinx.') shutil.move('sphinxdocs', 'docs') shutil.rmtree('mkdocs') {% else %} logger.info('Removing all documentation files') shutil.rmtree('mkdocs') shutil.rmtree('sphinxdocs') {% endif %}
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging logging.basicConfig(level=logging.DEBUG) logger = logging.getLogger('post_gen_project') import shutil import os {% if cookiecutter.docs_tool == "mkdocs" %} logger.info('Moving files for mkdocs.') os.rename('mkdocs/mkdocs.yml', 'mkdocs.yml') shutil.move('mkdocs', 'docs') shutil.rmtree('sphinxdocs') {% elif cookiecutter.docs_tool == "sphinx" %} logger.info('Moving files for sphinx.') shutil.move('sphinxdocs', 'docs') shutil.rmtree('mkdocs') {% else %} logger.info('Removing all documentation files') shutil.rmtree('mkdocs') shutil.rmtree('sphinxdocs') {% endif %} logger.info('Removing jinja2 macros') shutil.rmtree('macros')
Add an additional post gen hook to remove the jinja2 templates
Add an additional post gen hook to remove the jinja2 templates
Python
mit
pytest-dev/cookiecutter-pytest-plugin
--- +++ @@ -29,3 +29,7 @@ shutil.rmtree('sphinxdocs') {% endif %} + + +logger.info('Removing jinja2 macros') +shutil.rmtree('macros')
16aafc5ed95a7a0f830905d45c827dcc3cd67889
setup.py
setup.py
""" PiPocketGeiger ----- Radiation Watch Pocket Geiger Type 5 library for Raspberry Pi. Links ````` * `code and documentation <https://github.com/MonsieurV/PiPocketGeiger>`_ """ import re import ast from setuptools import setup setup( name='PiPocketGeiger', version=0.1, url='https://github.com/MonsieurV/PiPocketGeiger', license='MIT', author='Yoan Tournade', author_email='yoan@ytotech.com', description='A library for monitoring radiation with the Radiation Watch ' 'Pocket Geiger.', long_description=__doc__, packages=['PiPocketGeiger'], include_package_data=True, zip_safe=True, platforms='any', install_requires=[ 'RPi.GPIO>=0.5.0a', ] )
""" ============== PiPocketGeiger ============== Radiation Watch Pocket Geiger Type 5 library for Raspberry Pi. Usage ===== :: from PiPocketGeiger import RadiationWatch import time with RadiationWatch(24, 23) as radiationWatch: while 1: print(radiationWatch.status()) time.sleep(5) See GitHub repository for complete documentation. """ import re import ast from setuptools import setup setup( name='PiPocketGeiger', version='0.1a', url='https://github.com/MonsieurV/PiPocketGeiger', license='MIT', author='Yoan Tournade', author_email='yoan@ytotech.com', description='A library for monitoring radiation with the Radiation Watch ' 'Pocket Geiger.', long_description=__doc__, packages=['PiPocketGeiger'], include_package_data=True, zip_safe=True, platforms='any', install_requires=[ 'RPi.GPIO>=0.5.0a', ] )
Update pypi description and release new version
Update pypi description and release new version
Python
mit
MonsieurV/PiPocketGeiger
--- +++ @@ -1,11 +1,24 @@ """ +============== PiPocketGeiger ------ +============== + Radiation Watch Pocket Geiger Type 5 library for Raspberry Pi. -Links -````` -* `code and documentation <https://github.com/MonsieurV/PiPocketGeiger>`_ +Usage +===== +:: + + from PiPocketGeiger import RadiationWatch + import time + + with RadiationWatch(24, 23) as radiationWatch: + while 1: + print(radiationWatch.status()) + time.sleep(5) + + +See GitHub repository for complete documentation. """ import re import ast @@ -13,7 +26,7 @@ setup( name='PiPocketGeiger', - version=0.1, + version='0.1a', url='https://github.com/MonsieurV/PiPocketGeiger', license='MIT', author='Yoan Tournade',
4cb1535b2e296b6f2471e17295e0ebe6fef7214c
fabfile.py
fabfile.py
from armstrong.dev.tasks import * settings = { 'DEBUG': True, 'INSTALLED_APPS': ( 'django.contrib.contenttypes', 'armstrong.core.arm_wells', 'armstrong.core.arm_wells.tests.arm_wells_support', ), 'TEMPLATE_CONTEXT_PROCESSORS': ( 'django.core.context_processors.request', ), 'ROOT_URLCONF': 'armstrong.core.arm_wells.tests.arm_wells_support.urls', } main_app = "arm_wells" full_name = "armstrong.core.arm_wells" tested_apps = (main_app, 'arm_wells_support') pip_install_first = True
from armstrong.dev.tasks import * settings = { 'DEBUG': True, 'INSTALLED_APPS': ( 'django.contrib.contenttypes', 'armstrong.core.arm_wells', 'armstrong.core.arm_wells.tests.arm_wells_support', 'south', ), 'TEMPLATE_CONTEXT_PROCESSORS': ( 'django.core.context_processors.request', ), 'ROOT_URLCONF': 'armstrong.core.arm_wells.tests.arm_wells_support.urls', } main_app = "arm_wells" full_name = "armstrong.core.arm_wells" tested_apps = (main_app, 'arm_wells_support') pip_install_first = True
Add south to list of installed apps to create migrations
Add south to list of installed apps to create migrations
Python
apache-2.0
armstrong/armstrong.core.arm_wells,dmclain/armstrong.core.arm_wells,texastribune/armstrong.core.arm_wells,armstrong/armstrong.core.arm_wells,texastribune/armstrong.core.arm_wells,dmclain/armstrong.core.arm_wells,armstrong/armstrong.core.arm_wells
--- +++ @@ -6,6 +6,7 @@ 'django.contrib.contenttypes', 'armstrong.core.arm_wells', 'armstrong.core.arm_wells.tests.arm_wells_support', + 'south', ), 'TEMPLATE_CONTEXT_PROCESSORS': ( 'django.core.context_processors.request',
55fed5d1ae2f7ad72eb4766d41440b2c50ff4fb2
setup.py
setup.py
#!/usr/bin/python # -*-coding:UTF-8 -*- from setuptools import setup, find_packages from os import path here = path.abspath(path.dirname(__file__)) setup( name='dictmysqldb', version='0.1.7', description='A mysql package above MySQL-python for more convenient database manipulation with Python dictionary.', author='Guangyang Li', author_email='mail@guangyangli.com', license='MIT', py_modules=['DictMySQLdb'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Software Development :: Build Tools', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7' ], keywords='mysql database', packages=find_packages(exclude=['MySQL-python']), install_requires=['MySQL-python'], )
#!/usr/bin/python # -*-coding:UTF-8 -*- from setuptools import setup, find_packages from os import path here = path.abspath(path.dirname(__file__)) setup( name='dictmysqldb', version='0.1.8', description='A mysql package above MySQL-python for more convenient database manipulation with Python dictionary.', author='Guangyang Li', author_email='mail@guangyangli.com', license='MIT', py_modules=['DictMySQLdb'], classifiers=[ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'Topic :: Software Development :: Build Tools', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7' ], keywords='mysql database', packages=find_packages(exclude=['MySQL-python']), install_requires=['MySQL-python'], )
Update the version to 0.1.8
Update the version to 0.1.8
Python
mit
ligyxy/DictMySQLdb,ligyxy/DictMySQL
--- +++ @@ -9,7 +9,7 @@ setup( name='dictmysqldb', - version='0.1.7', + version='0.1.8', description='A mysql package above MySQL-python for more convenient database manipulation with Python dictionary.',
63c2bdcf6cc3dae59f78abb59b14ca3e52789852
src/rlib/string_stream.py
src/rlib/string_stream.py
from rpython.rlib.streamio import Stream, StreamError class StringStream(Stream): def __init__(self, string): self._string = string self.pos = 0 self.max = len(string) - 1 def write(self, data): raise StreamError("StringStream is not writable") def truncate(self, size): raise StreamError("StringStream is immutable") def peek(self): if self.pos < self.max: return self._string[self.pos:] else: return '' def tell(self): return self.pos def seek(self, offset, whence): if whence == 0: self.pos = max(0, offset) elif whence == 1: self.pos = max(0, self.pos + offset) elif whence == 2: self.pos = max(0, self.max + offset) else: raise StreamError("seek(): whence must be 0, 1 or 2") def read(self, n): assert isinstance(n, int) end = self.pos + n data = self._string[self.pos:end] self.pos += len(data) return data
from rpython.rlib.streamio import Stream, StreamError class StringStream(Stream): def __init__(self, string): self._string = string self.pos = 0 self.max = len(string) - 1 def write(self, data): raise StreamError("StringStream is not writable") def truncate(self, size): raise StreamError("StringStream is immutable") def tell(self): return self.pos def seek(self, offset, whence): if whence == 0: self.pos = max(0, offset) elif whence == 1: self.pos = max(0, self.pos + offset) elif whence == 2: self.pos = max(0, self.max + offset) else: raise StreamError("seek(): whence must be 0, 1 or 2") def read(self, n): assert isinstance(n, int) end = self.pos + n assert end >= 0 data = self._string[self.pos:end] self.pos += len(data) return data
Fix StringStream to conform to latest pypy
Fix StringStream to conform to latest pypy Signed-off-by: Stefan Marr <46f1a0bd5592a2f9244ca321b129902a06b53e03@stefan-marr.de>
Python
mit
smarr/PySOM,smarr/PySOM,SOM-st/RPySOM,SOM-st/RPySOM,SOM-st/PySOM,SOM-st/PySOM
--- +++ @@ -1,4 +1,5 @@ from rpython.rlib.streamio import Stream, StreamError + class StringStream(Stream): def __init__(self, string): @@ -8,14 +9,9 @@ def write(self, data): raise StreamError("StringStream is not writable") + def truncate(self, size): raise StreamError("StringStream is immutable") - - def peek(self): - if self.pos < self.max: - return self._string[self.pos:] - else: - return '' def tell(self): return self.pos @@ -33,6 +29,7 @@ def read(self, n): assert isinstance(n, int) end = self.pos + n + assert end >= 0 data = self._string[self.pos:end] self.pos += len(data) return data
b9805bebaf3a3cc3116dfd528f4b7f5c6c959aa0
setup.py
setup.py
#!/usr/bin/env python from os.path import exists from setuptools import setup setup(name='cachey', version='0.1.1', description='Caching mindful of computation/storage costs', url='http://github.com/mrocklin/cachey/', maintainer='Matthew Rocklin', maintainer_email='mrocklin@gmail.com', license='BSD', keywords='', packages=['cachey'], install_requires=list(open('requirements.txt').read().strip().split('\n')), long_description=(open('README.md').read() if exists('README.md') else ''), zip_safe=False)
#!/usr/bin/env python from os.path import exists from setuptools import setup setup(name='cachey', version='0.1.1', description='Caching mindful of computation/storage costs', url='http://github.com/blaze/cachey/', maintainer='Matthew Rocklin', maintainer_email='mrocklin@gmail.com', license='BSD', keywords='', packages=['cachey'], install_requires=list(open('requirements.txt').read().strip().split('\n')), long_description=(open('README.md').read() if exists('README.md') else ''), zip_safe=False)
Change links to blaze org
Change links to blaze org
Python
bsd-3-clause
blaze/cachey,Winterflower/cachey,mrocklin/cachey
--- +++ @@ -6,7 +6,7 @@ setup(name='cachey', version='0.1.1', description='Caching mindful of computation/storage costs', - url='http://github.com/mrocklin/cachey/', + url='http://github.com/blaze/cachey/', maintainer='Matthew Rocklin', maintainer_email='mrocklin@gmail.com', license='BSD',
2300bd970de91c13b899f50b5f15c0d2cefaecb4
setup.py
setup.py
from setuptools import setup __version__ = None with open('mendeley/version.py') as f: exec(f.read()) setup( name='mendeley', version=__version__, packages=['mendeley'], url='http://dev.mendeley.com', license='MIT', author='Mendeley', author_email='api@mendeley.com', description='Python SDK for the Mendeley API', install_requires=[ 'arrow==0.4.4', 'future==0.13.0', 'memoized-property==1.0.2', 'requests==2.3.0', 'requests-oauthlib==0.4.1', ], tests_require=[ 'pytest==2.6.1', 'vcrpy==1.0.2' ] )
from setuptools import setup __version__ = None with open('mendeley/version.py') as f: exec(f.read()) setup( name='mendeley', version=__version__, packages=['mendeley'], url='http://dev.mendeley.com', license='MIT', author='Mendeley', author_email='api@mendeley.com', description='Python SDK for the Mendeley API', install_requires=[ 'arrow', 'future', 'memoized-property', 'requests', 'requests-oauthlib', ], tests_require=[ 'pytest', 'vcrpy' ] )
Return to using latest versions, now vcrpy is fixed.
Return to using latest versions, now vcrpy is fixed.
Python
apache-2.0
Mendeley/mendeley-python-sdk,lucidbard/mendeley-python-sdk
--- +++ @@ -15,15 +15,15 @@ description='Python SDK for the Mendeley API', install_requires=[ - 'arrow==0.4.4', - 'future==0.13.0', - 'memoized-property==1.0.2', - 'requests==2.3.0', - 'requests-oauthlib==0.4.1', + 'arrow', + 'future', + 'memoized-property', + 'requests', + 'requests-oauthlib', ], tests_require=[ - 'pytest==2.6.1', - 'vcrpy==1.0.2' + 'pytest', + 'vcrpy' ] )
cc379cb3e68ddf5a110eef139282c83dc8b8e9d1
tests/test_queue/test_queue.py
tests/test_queue/test_queue.py
import unittest from aids.queue.queue import Queue class QueueTestCase(unittest.TestCase): ''' Unit tests for the Queue data structure ''' def setUp(self): self.test_queue = Queue() def test_queue_initialization(self): self.assertTrue(isinstance(self.test_queue, Queue)) def test_queue_is_empty(self): self.assertTrue(self.test_queue.is_empty()) def tearDown(self): pass
import unittest from aids.queue.queue import Queue class QueueTestCase(unittest.TestCase): ''' Unit tests for the Queue data structure ''' def setUp(self): self.test_queue = Queue() def test_queue_initialization(self): self.assertTrue(isinstance(self.test_queue, Queue)) def test_queue_is_empty(self): self.assertTrue(self.test_queue.is_empty()) def test_queue_enqueue(self): self.test_queue.enqueue(1) self.assertEqual(len(self.test_queue), 1) def test_queue_dequeue(self): self.test_queue.enqueue(1) self.assertEqual(self.test_queue.dequeue(), 1) def test_queue_len(self): self.test_queue.enqueue(1) self.assertEqual(len(self.test_queue), 1) def tearDown(self): pass
Add unit tests for enqueue, dequeue and length for Queue
Add unit tests for enqueue, dequeue and length for Queue
Python
mit
ueg1990/aids
--- +++ @@ -17,5 +17,17 @@ def test_queue_is_empty(self): self.assertTrue(self.test_queue.is_empty()) + def test_queue_enqueue(self): + self.test_queue.enqueue(1) + self.assertEqual(len(self.test_queue), 1) + + def test_queue_dequeue(self): + self.test_queue.enqueue(1) + self.assertEqual(self.test_queue.dequeue(), 1) + + def test_queue_len(self): + self.test_queue.enqueue(1) + self.assertEqual(len(self.test_queue), 1) + def tearDown(self): pass
0da53a2d876baac9ef83ad1a9d606439e0672a09
system/t04_mirror/show.py
system/t04_mirror/show.py
from lib import BaseTest import re class ShowMirror1Test(BaseTest): """ show mirror: regular mirror """ fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"] runCmd = "aptly mirror show mirror1" class ShowMirror2Test(BaseTest): """ show mirror: missing mirror """ runCmd = "aptly mirror show mirror-xx" expectedCode = 1 class ShowMirror3Test(BaseTest): """ show mirror: regular mirror with packages """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib" outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", s)
from lib import BaseTest import re class ShowMirror1Test(BaseTest): """ show mirror: regular mirror """ fixtureCmds = ["aptly mirror create mirror1 http://mirror.yandex.ru/debian/ wheezy"] runCmd = "aptly mirror show mirror1" class ShowMirror2Test(BaseTest): """ show mirror: missing mirror """ runCmd = "aptly mirror show mirror-xx" expectedCode = 1 class ShowMirror3Test(BaseTest): """ show mirror: regular mirror with packages """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib" outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:+A-Za-z -]+\n", "", s)
Add '+' to list of skipped symbols.
Add '+' to list of skipped symbols.
Python
mit
aptly-dev/aptly,gdbdzgd/aptly,aptly-dev/aptly,adfinis-forks/aptly,sobczyk/aptly,gearmover/aptly,seaninspace/aptly,bankonme/aptly,adfinis-forks/aptly,smira/aptly,neolynx/aptly,gdbdzgd/aptly,seaninspace/aptly,bankonme/aptly,ceocoder/aptly,neolynx/aptly,vincentbernat/aptly,bsundsrud/aptly,ceocoder/aptly,aptly-dev/aptly,gdbdzgd/aptly,jola5/aptly,vincentbernat/aptly,jola5/aptly,bsundsrud/aptly,bankonme/aptly,seaninspace/aptly,neolynx/aptly,smira/aptly,scalp42/aptly,scalp42/aptly,sobczyk/aptly,gearmover/aptly,sobczyk/aptly,gearmover/aptly,ceocoder/aptly,vincentbernat/aptly,scalp42/aptly,adfinis-forks/aptly,jola5/aptly,smira/aptly,bsundsrud/aptly
--- +++ @@ -24,4 +24,4 @@ """ fixtureDB = True runCmd = "aptly mirror show --with-packages wheezy-contrib" - outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:A-Za-z -]+\n", "", s) + outputMatchPrepare = lambda _, s: re.sub(r"Last update: [0-9:+A-Za-z -]+\n", "", s)
c96146226c693b8b5d1d13e0cf650b40f5e92df2
setup.py
setup.py
from setuptools import setup, find_packages setup( name='zeit.campus', version='1.6.4.dev0', author='Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi section Campus", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'grokcore.component', 'gocept.httpserverlayer', 'gocept.selenium', 'mock', 'plone.testing', 'setuptools', 'zeit.cms>=2.88.0.dev0', 'zeit.content.article', 'zeit.content.cp', 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.link', 'zeit.edit', 'zeit.push>=1.13.0.dev0', 'zope.component', 'zope.interface', 'zope.schema', ], entry_points={ 'fanstatic.libraries': [ 'zeit_campus=zeit.campus' '.browser.resources:lib', ], }, )
from setuptools import setup, find_packages setup( name='zeit.campus', version='1.6.4.dev0', author='Zeit Online', author_email='zon-backend@zeit.de', url='http://www.zeit.de/', description="vivi section Campus", packages=find_packages('src'), package_dir={'': 'src'}, include_package_data=True, zip_safe=False, license='BSD', namespace_packages=['zeit'], install_requires=[ 'grokcore.component', 'gocept.httpserverlayer', 'gocept.selenium', 'mock', 'plone.testing', 'setuptools', 'zeit.cms >= 3.0.dev0', 'zeit.content.article', 'zeit.content.cp', 'zeit.content.gallery', 'zeit.content.infobox', 'zeit.content.link', 'zeit.edit', 'zeit.push>=1.13.0.dev0', 'zope.component', 'zope.interface', 'zope.schema', ], entry_points={ 'fanstatic.libraries': [ 'zeit_campus=zeit.campus' '.browser.resources:lib', ], }, )
Update to version with celery.
ZON-3409: Update to version with celery.
Python
bsd-3-clause
ZeitOnline/zeit.campus
--- +++ @@ -21,7 +21,7 @@ 'mock', 'plone.testing', 'setuptools', - 'zeit.cms>=2.88.0.dev0', + 'zeit.cms >= 3.0.dev0', 'zeit.content.article', 'zeit.content.cp', 'zeit.content.gallery',
b5fa8ff1d86485c7f00ddecaef040ca66a817dfc
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup setup( name='freki', version='0.3.0-develop', description='PDF-Extraction helper for RiPLEs pipeline.', author='Michael Goodman, Ryan Georgi', author_email='goodmami@uw.edu, rgeorgi@uw.edu', url='https://github.com/xigt/freki', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: Linguistic', 'Topic :: Utilities' ], keywords='nlp pdf ie text', packages=['freki', 'freki.readers', 'freki.analyzers'], install_requires=[ 'numpy', 'matplotlib' ], entry_points={ 'console_scripts': [ 'freki=freki.main:main' ] }, )
#!/usr/bin/env python from distutils.core import setup setup( name='freki', version='0.3.0-develop', description='PDF-Extraction helper for RiPLEs pipeline.', author='Michael Goodman, Ryan Georgi', author_email='goodmami@uw.edu, rgeorgi@uw.edu', url='https://github.com/xigt/freki', license='MIT', classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Information Technology', 'Intended Audience :: Science/Research', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Scientific/Engineering :: Information Analysis', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: Linguistic', 'Topic :: Utilities' ], keywords='nlp pdf ie text', packages=['freki', 'freki.readers', 'freki.analyzers'], install_requires=[ 'numpy', 'matplotlib', 'chardet' ], entry_points={ 'console_scripts': [ 'freki=freki.main:main' ] }, )
Add Chardet as installation dependency
Add Chardet as installation dependency
Python
mit
xigt/freki,xigt/freki
--- +++ @@ -29,7 +29,8 @@ packages=['freki', 'freki.readers', 'freki.analyzers'], install_requires=[ 'numpy', - 'matplotlib' + 'matplotlib', + 'chardet' ], entry_points={ 'console_scripts': [
b6461f1f270f6c10f86d0a28c7dd6e37b8050059
setup.py
setup.py
from distutils.core import setup from setuptools import find_packages with open('README.md') as fp: long_description = fp.read() setup( name='sendwithus', version='5.2.0', author='sendwithus', author_email='us@sendwithus.com', packages=find_packages(), scripts=[], url='https://github.com/sendwithus/sendwithus_python', license='LICENSE.txt', description='Python API client for sendwithus.com', long_description=long_description, test_suite="sendwithus.test", install_requires=[ "requests >= 2.0.0", "six >= 1.9.0" ], extras_require={ "test": [ "pytest >= 3.0.5", "pytest-xdist >= 1.15.0" ] }, classifiers=[ "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "License :: OSI Approved :: Apache Software License", "Development Status :: 5 - Production/Stable", "Topic :: Communications :: Email" ] )
from distutils.core import setup from setuptools import find_packages with open('README.md') as fp: long_description = fp.read() setup( name='sendwithus', version='5.2.0', author='sendwithus', author_email='us@sendwithus.com', packages=find_packages(), scripts=[], url='https://github.com/sendwithus/sendwithus_python', license='LICENSE.txt', description='Python API client for sendwithus.com', long_description=long_description, long_description_content_type='text/markdown', test_suite="sendwithus.test", install_requires=[ "requests >= 2.0.0", "six >= 1.9.0" ], extras_require={ "test": [ "pytest >= 3.0.5", "pytest-xdist >= 1.15.0" ] }, classifiers=[ "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "License :: OSI Approved :: Apache Software License", "Development Status :: 5 - Production/Stable", "Topic :: Communications :: Email" ] )
Add a description content type for PyPI
Add a description content type for PyPI A long_description_content_type is required since our README is in markdown instead of restructured text.
Python
apache-2.0
sendwithus/sendwithus_python
--- +++ @@ -15,6 +15,7 @@ license='LICENSE.txt', description='Python API client for sendwithus.com', long_description=long_description, + long_description_content_type='text/markdown', test_suite="sendwithus.test", install_requires=[ "requests >= 2.0.0",
3cc25e574c38a1d8247a1edd4f70a2db72cb2538
setup.py
setup.py
from setuptools import setup config = { 'include_package_data': True, 'description': 'Simulated datasets of DNA', 'download_url': 'https://github.com/kundajelab/simdna', 'version': '0.4.3.3', 'packages': ['simdna', 'simdna.resources', 'simdna.synthetic'], 'package_data': {'simdna.resources': ['encode_motifs.txt.gz', 'HOCOMOCOv10_HUMAN_mono_homer_format_0.001.motif.gz']}, 'setup_requires': [], 'install_requires': ['numpy>=1.9', 'matplotlib', 'scipy'], 'dependency_links': [], 'scripts': ['scripts/densityMotifSimulation.py', 'scripts/emptyBackground.py', 'scripts/motifGrammarSimulation.py', 'scripts/variableSpacingGrammarSimulation.py'], 'name': 'simdna' } if __name__== '__main__': setup(**config)
from setuptools import setup config = { 'include_package_data': True, 'description': 'Simulated datasets of DNA', 'download_url': 'https://github.com/kundajelab/simdna', 'version': '0.4.3.2', 'packages': ['simdna', 'simdna.resources', 'simdna.synthetic'], 'package_data': {'simdna.resources': ['encode_motifs.txt.gz', 'HOCOMOCOv10_HUMAN_mono_homer_format_0.001.motif.gz']}, 'setup_requires': [], 'install_requires': ['numpy>=1.9', 'matplotlib', 'scipy'], 'dependency_links': [], 'scripts': ['scripts/densityMotifSimulation.py', 'scripts/emptyBackground.py', 'scripts/motifGrammarSimulation.py', 'scripts/variableSpacingGrammarSimulation.py'], 'name': 'simdna' } if __name__== '__main__': setup(**config)
Revert "Updating version now that docs are up-to-date"
Revert "Updating version now that docs are up-to-date" This reverts commit 3f2ed8f7bfbed7162f4047cea534d83e52e714af.
Python
mit
kundajelab/simdna,kundajelab/simdna
--- +++ @@ -4,7 +4,7 @@ 'include_package_data': True, 'description': 'Simulated datasets of DNA', 'download_url': 'https://github.com/kundajelab/simdna', - 'version': '0.4.3.3', + 'version': '0.4.3.2', 'packages': ['simdna', 'simdna.resources', 'simdna.synthetic'], 'package_data': {'simdna.resources': ['encode_motifs.txt.gz', 'HOCOMOCOv10_HUMAN_mono_homer_format_0.001.motif.gz']}, 'setup_requires': [],
293cad9d71c3cec7dacf486a4bb6da21e8d7df28
setup.py
setup.py
from setuptools import setup, find_packages setup( name='coverpy', version='0.0.2dev', packages=find_packages(), install_requires=['requests'], license='MIT License', long_description=open('README.md').read(), package_data = { '': ['*.txt', '*.md'], }, )
from setuptools import setup, find_packages setup( name='coverpy', version='0.8', packages=find_packages(exclude=['scripts', 'tests']), install_requires=['requests'], license='MIT License', author="fallenshell", author_email='dev@mxio.us', description="A wrapper for iTunes Search API", long_description=open('README.md').read(), package_data = { '': ['*.txt', '*.md'], } )
Exclude tests and cmdline scripts
Exclude tests and cmdline scripts
Python
mit
fallenshell/coverpy
--- +++ @@ -2,12 +2,15 @@ setup( name='coverpy', - version='0.0.2dev', - packages=find_packages(), + version='0.8', + packages=find_packages(exclude=['scripts', 'tests']), install_requires=['requests'], license='MIT License', + author="fallenshell", + author_email='dev@mxio.us', + description="A wrapper for iTunes Search API", long_description=open('README.md').read(), package_data = { '': ['*.txt', '*.md'], - }, + } )
73e3cee19d0330154f36157b762cd1a69e055b19
setup.py
setup.py
from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='pycc', version='0.0.1', url='https://github.com/kevinconway/pycc', license=license, description='Python code optimizer..', author='Kevin Conway', author_email='kevinjacobconway@gmail.com', long_description=readme, classifiers=[], packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), requires=['astkit'], entry_points = { 'console_scripts': [ 'pycc-lint = pycc.cli.lint:main', 'pycc-transform = pycc.cli.transform:main', 'pycc-compile = pycc.cli.compile:main', ], }, )
from setuptools import setup, find_packages with open('README.rst') as f: readme = f.read() with open('LICENSE') as f: license = f.read() setup( name='pycc', version='0.0.1', url='https://github.com/kevinconway/pycc', license=license, description='Python code optimizer..', author='Kevin Conway', author_email='kevinjacobconway@gmail.com', long_description=readme, classifiers=[], packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), requires=['astkit', 'pytest'], entry_points={ 'console_scripts': [ 'pycc-lint = pycc.cli.lint:main', 'pycc-transform = pycc.cli.transform:main', 'pycc-compile = pycc.cli.compile:main', ], }, )
Add package dependencies for printing and testing
Add package dependencies for printing and testing Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com>
Python
apache-2.0
kevinconway/pycc,kevinconway/pycc
--- +++ @@ -18,8 +18,8 @@ long_description=readme, classifiers=[], packages=find_packages(exclude=['tests', 'build', 'dist', 'docs']), - requires=['astkit'], - entry_points = { + requires=['astkit', 'pytest'], + entry_points={ 'console_scripts': [ 'pycc-lint = pycc.cli.lint:main', 'pycc-transform = pycc.cli.transform:main',
05650789f9ee950f6906a43806009a0fafb977a1
setup.py
setup.py
from setuptools import setup from subprocess import check_output, CalledProcessError try: num_gpus = len(check_output(['nvidia-smi', '--query-gpu=gpu_name', '--format=csv']).decode().strip().split('\n')) tf = 'tensorflow-gpu' if num_gpus > 1 else 'tensorflow' except CalledProcessError: tf = 'tensorflow' setup( name='autoencoder', version='0.1', description='An autoencoder implementation', author='Gokcen Eraslan', author_email="goekcen.eraslan@helmholtz-muenchen.de", packages=['autoencoder'], install_requires=[tf, 'numpy>=1.7', 'keras>=1.2', 'six>=1.10.0', 'scikit-learn', 'pandas' #for preprocessing ], url='https://github.com/gokceneraslan/autoencoder', entry_points={ 'console_scripts': [ 'autoencoder = autoencoder.__main__:main' ]}, license='Apache License 2.0', classifiers=['License :: OSI Approved :: Apache Software License', 'Topic :: Scientific/Engineering :: Artificial Intelligence', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5'], )
from setuptools import setup from subprocess import check_output, CalledProcessError try: num_gpus = len(check_output(['nvidia-smi', '--query-gpu=gpu_name', '--format=csv']).decode().strip().split('\n')) tf = 'tensorflow-gpu' if num_gpus > 1 else 'tensorflow' except CalledProcessError: tf = 'tensorflow' except FileNotFoundError: tf = 'tensorflow' setup( name='autoencoder', version='0.1', description='An autoencoder implementation', author='Gokcen Eraslan', author_email="goekcen.eraslan@helmholtz-muenchen.de", packages=['autoencoder'], install_requires=[tf, 'numpy>=1.7', 'keras>=1.2', 'six>=1.10.0', 'scikit-learn', 'pandas' #for preprocessing ], url='https://github.com/gokceneraslan/autoencoder', entry_points={ 'console_scripts': [ 'autoencoder = autoencoder.__main__:main' ]}, license='Apache License 2.0', classifiers=['License :: OSI Approved :: Apache Software License', 'Topic :: Scientific/Engineering :: Artificial Intelligence', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5'], )
Handle cases where nvidia-smi does not exist
Handle cases where nvidia-smi does not exist
Python
apache-2.0
theislab/dca,theislab/dca,theislab/dca
--- +++ @@ -6,6 +6,8 @@ '--format=csv']).decode().strip().split('\n')) tf = 'tensorflow-gpu' if num_gpus > 1 else 'tensorflow' except CalledProcessError: + tf = 'tensorflow' +except FileNotFoundError: tf = 'tensorflow'
1f391ca2ea88f3181b1c856012261db1327242ac
setup.py
setup.py
"""\ Grip ---- Render local readme files before sending off to Github. Grip is easy to set up `````````````````````` :: $ pip install grip $ cd myproject $ grip * Running on http://localhost:5000/ Links ````` * `Website <http://github.com/joeyespo/grip/>`_ """ from setuptools import setup, find_packages import grip as package setup( name=package.__name__, author='Joe Esposito', author_email='joe@joeyespo.com', url='http://github.com/joeyespo/grip', license='MIT', version=package.__version__, description=package.__description__, long_description=__doc__, platforms='any', packages=find_packages(), package_data={package.__name__: ['static/*', 'templates/*']}, entry_points={'console_scripts': ['grip = grip.command:main']}, install_requires=[ 'flask>=0.9', 'jinja2>=2.6', 'requests>=0.14', ], )
"""\ Grip ---- Render local readme files before sending off to Github. Grip is easy to set up `````````````````````` :: $ pip install grip $ cd myproject $ grip * Running on http://localhost:5000/ Links ````` * `Website <http://github.com/joeyespo/grip/>`_ """ from setuptools import setup, find_packages import grip as package setup( name=package.__name__, author='Joe Esposito', author_email='joe@joeyespo.com', url='http://github.com/joeyespo/grip', license='MIT', version=package.__version__, description=package.__description__, long_description=__doc__, platforms='any', packages=find_packages(), package_data={package.__name__: ['LICENSE', 'static/*', 'templates/*']}, entry_points={'console_scripts': ['grip = grip.command:main']}, install_requires=[ 'flask>=0.9', 'jinja2>=2.6', 'requests>=0.14', ], )
Add LINCENSE to included files.
Add LINCENSE to included files.
Python
mit
ssundarraj/grip,jbarreras/grip,mgoddard-pivotal/grip,mgoddard-pivotal/grip,joeyespo/grip,ssundarraj/grip,joeyespo/grip,jbarreras/grip
--- +++ @@ -37,7 +37,7 @@ long_description=__doc__, platforms='any', packages=find_packages(), - package_data={package.__name__: ['static/*', 'templates/*']}, + package_data={package.__name__: ['LICENSE', 'static/*', 'templates/*']}, entry_points={'console_scripts': ['grip = grip.command:main']}, install_requires=[ 'flask>=0.9',
889a2349efa1b76fd92981210798dc3e2d38d711
setup.py
setup.py
""" Setup script for the kvadratnet module. """ import os import subprocess from setuptools import setup import kvadratnet def readme(): """ Return a properly formatted readme text, if possible, that can be used as the long description for setuptools.setup. """ # This will fail if pandoc is not in system path. subprocess.call( [ "pandoc", "readme.md", "--from", "markdown", "--to", "rst", "-s", "-o", "readme.rst", ] ) with open("readme.rst") as f: readme = f.read() os.remove("readme.rst") return readme setup( name="kvadratnet", version=kvadratnet.__version__, description="Python tools for working with the Danish Kvadratnet tiling scheme.", long_description=readme(), classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "License :: OSI Approved :: ISC License (ISCL)", "Topic :: Scientific/Engineering :: GIS", "Topic :: Utilities", ], entry_points={"console_scripts": ["knet=knet:main"]}, keywords="kvadratnet gis tiling", url="https://github.com/kbevers/kvadratnet", author="Kristian Evers", author_email="kristianevers@gmail.com", license="ISC", py_modules=["kvadratnet", "knet"], test_suite="nose.collector", tests_require=["nose"], )
""" Setup script for the kvadratnet module. """ from setuptools import setup import kvadratnet def readme(): """ Return a properly formatted readme text, if possible, that can be used as the long description for setuptools.setup. """ with open("readme.md") as readme_file: descr = readme_file.read() return descr setup( name="kvadratnet", version=kvadratnet.__version__, description="Python tools for working with the Danish Kvadratnet tiling scheme.", long_description=readme(), long_description_content_type="text/markdown", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers", "Intended Audience :: Science/Research", "License :: OSI Approved :: ISC License (ISCL)", "Topic :: Scientific/Engineering :: GIS", "Topic :: Utilities", ], entry_points={"console_scripts": ["knet=knet:main"]}, keywords="kvadratnet gis tiling", url="https://github.com/kbevers/kvadratnet", author="Kristian Evers", author_email="kristianevers@gmail.com", license="ISC", py_modules=["kvadratnet", "knet"], test_suite="nose.collector", tests_require=["nose"], )
Use unaltered markdown readme for long_description
Use unaltered markdown readme for long_description
Python
isc
kbevers/kvadratnet,kbevers/kvadratnet
--- +++ @@ -1,9 +1,6 @@ """ Setup script for the kvadratnet module. """ - -import os -import subprocess from setuptools import setup import kvadratnet @@ -14,24 +11,9 @@ Return a properly formatted readme text, if possible, that can be used as the long description for setuptools.setup. """ - # This will fail if pandoc is not in system path. - subprocess.call( - [ - "pandoc", - "readme.md", - "--from", - "markdown", - "--to", - "rst", - "-s", - "-o", - "readme.rst", - ] - ) - with open("readme.rst") as f: - readme = f.read() - os.remove("readme.rst") - return readme + with open("readme.md") as readme_file: + descr = readme_file.read() + return descr setup( @@ -39,6 +21,7 @@ version=kvadratnet.__version__, description="Python tools for working with the Danish Kvadratnet tiling scheme.", long_description=readme(), + long_description_content_type="text/markdown", classifiers=[ "Development Status :: 3 - Alpha", "Intended Audience :: Developers",
e817716960e4e89798d976d0b04bf49408932f0b
setup.py
setup.py
from setuptools import setup, find_packages __version__ = None exec(open('tadtool/version.py').read()) setup( name='tadtool', version=__version__, description='Assistant to find cutoffs in TAD calling algorithms.', packages=find_packages(exclude=["test"]), install_requires=[ 'numpy>=1.9.0', 'matplotlib>=3.6.0', 'progressbar2', 'future', ], author='Vaquerizas lab', author_email='kai.kruse@mpi-muenster.mpg.de', url='https://github.com/vaquerizaslab/tadtool', download_url='https://github.com/vaquerizaslab/tadtool/tarball/0.81', keywords=['bioinformatics', 'hi-c', 'genomics', 'tad'], classifiers=[], scripts=['bin/tadtool'] )
import os from setuptools import setup, find_packages, Command __version__ = None exec(open('tadtool/version.py').read()) class CleanCommand(Command): """ Custom clean command to tidy up the project root. """ user_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): os.system('rm -vrf ./build ./dist ./*.pyc ./*.tgz ./*.egg-info ./htmlcov') setup( name='tadtool', version=__version__, description='Assistant to find cutoffs in TAD calling algorithms.', packages=find_packages(exclude=["test"]), install_requires=[ 'numpy>=1.9.0', 'matplotlib>=3.6.0', 'progressbar2', 'future', ], author='Vaquerizas lab', author_email='kai.kruse@mpi-muenster.mpg.de', url='https://github.com/vaquerizaslab/tadtool', keywords=['bioinformatics', 'hi-c', 'genomics', 'tad'], classifiers=[], scripts=['bin/tadtool'], cmdclass={ 'clean': CleanCommand }, )
Add clean command and remove download tarball
Add clean command and remove download tarball
Python
mit
vaquerizaslab/tadtool
--- +++ @@ -1,8 +1,25 @@ -from setuptools import setup, find_packages +import os +from setuptools import setup, find_packages, Command __version__ = None exec(open('tadtool/version.py').read()) + +class CleanCommand(Command): + """ + Custom clean command to tidy up the project root. + """ + user_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + os.system('rm -vrf ./build ./dist ./*.pyc ./*.tgz ./*.egg-info ./htmlcov') + setup( name='tadtool', @@ -18,8 +35,10 @@ author='Vaquerizas lab', author_email='kai.kruse@mpi-muenster.mpg.de', url='https://github.com/vaquerizaslab/tadtool', - download_url='https://github.com/vaquerizaslab/tadtool/tarball/0.81', keywords=['bioinformatics', 'hi-c', 'genomics', 'tad'], classifiers=[], - scripts=['bin/tadtool'] + scripts=['bin/tadtool'], + cmdclass={ + 'clean': CleanCommand + }, )
1a547646ee75841a016788aa64cf71c876a9dd8b
setup.py
setup.py
from setuptools import setup, find_packages from djcelery_ses import __version__ setup( name='django-celery-ses', version=__version__, description="django-celery-ses", author='tzangms', author_email='tzangms@streetvoice.com', url='http://github.com/StreetVoice/django-celery-ses', license='MIT', test_suite='runtests.runtests', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires = [ "django >= 1.3", "django-celery >= 3", ], classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "Framework :: Django", "Environment :: Web Environment", ], keywords='django,celery,mail', )
from setuptools import setup, find_packages from djcelery_ses import __version__ setup( name='django-celery-ses', version=__version__, description="django-celery-ses", author='tzangms', author_email='tzangms@streetvoice.com', url='http://github.com/StreetVoice/django-celery-ses', license='MIT', test_suite='runtests.runtests', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires = [ "django >= 1.3, < 1.9", "django-celery >= 3", ], classifiers=[ "Programming Language :: Python", "Topic :: Software Development :: Libraries :: Python Modules", "Framework :: Django", "Environment :: Web Environment", ], keywords='django,celery,mail', )
Set install_requires: django >= 1.3, < 1.9
Set install_requires: django >= 1.3, < 1.9
Python
mit
StreetVoice/django-celery-ses
--- +++ @@ -15,7 +15,7 @@ include_package_data=True, zip_safe=False, install_requires = [ - "django >= 1.3", + "django >= 1.3, < 1.9", "django-celery >= 3", ], classifiers=[
9646c595068f9c996f05de51d7216cb0443a9809
setup.py
setup.py
from distutils.core import setup from dyn import __version__ with open('README.rst') as f: readme = f.read() with open('HISTORY.rst') as f: history = f.read() setup( name='dyn', version=__version__, keywords=['dyn', 'api', 'dns', 'email', 'dyndns', 'dynemail'], long_description='\n\n'.join([readme, history]), description='Dyn REST API wrapper', author='Jonathan Nappi, Cole Tuininga', author_email='jnappi@dyn.com', url='https://github.com/dyninc/Dynect-API-Python-Library', packages=['dyn', 'dyn/tm', 'dyn/mm', 'dyn/tm/services'], classifiers=[ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Internet :: Name Service (DNS)', 'Topic :: Software Development :: Libraries', ], )
from distutils.core import setup from dyn import __version__ with open('README.rst') as f: readme = f.read() with open('HISTORY.rst') as f: history = f.read() setup( name='dyn', version=__version__, keywords=['dyn', 'api', 'dns', 'email', 'dyndns', 'dynemail'], long_description='\n\n'.join([readme, history]), description='Dyn REST API wrapper', author='Jonathan Nappi, Cole Tuininga', author_email='jnappi@dyn.com', url='https://github.com/moogar0880/dyn-python', packages=['dyn', 'dyn/tm', 'dyn/mm', 'dyn/tm/services'], classifiers=[ 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 3', 'Topic :: Internet :: Name Service (DNS)', 'Topic :: Software Development :: Libraries', ], )
Fix for incorrect project url
Fix for incorrect project url
Python
bsd-3-clause
Marchowes/dyn-python,dyninc/dyn-python,mjhennig/dyn-python
--- +++ @@ -14,7 +14,7 @@ description='Dyn REST API wrapper', author='Jonathan Nappi, Cole Tuininga', author_email='jnappi@dyn.com', - url='https://github.com/dyninc/Dynect-API-Python-Library', + url='https://github.com/moogar0880/dyn-python', packages=['dyn', 'dyn/tm', 'dyn/mm', 'dyn/tm/services'], classifiers=[ 'Programming Language :: Python :: 2',
4ee7ebe82f7f17ae10c838073ffbb319e1fff24f
setup.py
setup.py
import os from setuptools import setup version = '0.9.2.dev0' def read_file(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as fp: return fp.read() setup(name='django-ogmios', version=version, author="Fusionbox, Inc.", author_email="programmers@fusionbox.com", url="https://github.com/fusionbox/django-ogmios", keywords="email send easy simple helpers django", description="Just sends email. Simple, easy, multiformat.", long_description=read_file('README.rst') + '\n\n' + read_file('CHANGELOG.rst'), classifiers=[ 'Development Status :: 4 - Beta', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Communications :: Email', 'Topic :: Software Development :: Libraries' ], install_requires=[ 'Django>=1.7,<1.9' 'PyYAML', 'Markdown', 'html2text', ], packages=['ogmios'], )
import os from setuptools import setup version = '0.9.2.dev0' def read_file(fname): with open(os.path.join(os.path.dirname(__file__), fname)) as fp: return fp.read() setup(name='django-ogmios', version=version, author="Fusionbox, Inc.", author_email="programmers@fusionbox.com", url="https://github.com/fusionbox/django-ogmios", keywords="email send easy simple helpers django", description="Just sends email. Simple, easy, multiformat.", long_description=read_file('README.rst') + '\n\n' + read_file('CHANGELOG.rst'), classifiers=[ 'Development Status :: 4 - Beta', 'Framework :: Django', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Communications :: Email', 'Topic :: Software Development :: Libraries' ], install_requires=[ 'Django>=1.7,<1.9', 'PyYAML', 'Markdown', 'html2text', ], packages=['ogmios'], )
Add missing comma to requirements.
Add missing comma to requirements.
Python
bsd-2-clause
fusionbox/django-ogmios,fusionbox/django-ogmios
--- +++ @@ -30,7 +30,7 @@ 'Topic :: Software Development :: Libraries' ], install_requires=[ - 'Django>=1.7,<1.9' + 'Django>=1.7,<1.9', 'PyYAML', 'Markdown', 'html2text',