commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
788e6c081ba21fb75bc00195a82cb212542f7135 | django/contrib/comments/feeds.py | django/contrib/comments/feeds.py | from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
| from django.conf import settings
from django.contrib.syndication.feeds import Feed
from django.contrib.sites.models import Site
from django.contrib import comments
class LatestCommentFeed(Feed):
"""Feed of latest comments on the current site."""
def title(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"%s comments" % self._site.name
def link(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return "http://%s/" % (self._site.domain)
def description(self):
if not hasattr(self, '_site'):
self._site = Site.objects.get_current()
return u"Latest comments on %s" % self._site.name
def items(self):
qs = comments.get_model().objects.filter(
site__pk = settings.SITE_ID,
is_public = True,
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40]
def item_pubdate(self, item):
return item.submit_date
| Use correct m2m join table name in LatestCommentsFeed | Use correct m2m join table name in LatestCommentsFeed
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9089 bcc190cf-cafb-0310-a4f2-bffc1f526a37
| Python | bsd-3-clause | jhoos/django,mttr/django,Leila20/django,GaussDing/django,h4r5h1t/django-hauthy,schinckel/django,ASCrookes/django,django-nonrel/django-nonrel,asser/django,whs/django,ckirby/django,whs/django,scorphus/django,nealtodd/django,beni55/django,leeon/annotated-django,andrewsmedina/django,nju520/django,taaviteska/django,marcelocure/django,jejimenez/django,akaihola/django,cobalys/django,koordinates/django,alimony/django,joakim-hove/django,marissazhou/django,zulip/django,yamila-moreno/django,rsvip/Django,krishna-pandey-git/django,payeldillip/django,auvipy/django,Y3K/django,yograterol/django,SujaySKumar/django,gcd0318/django,auvipy/django,ckirby/django,TridevGuha/django,matiasb/django,raphaelmerx/django,jsoref/django,divio/django,hnakamur/django,memtoko/django,blindroot/django,alimony/django,mttr/django,liuliwork/django,Adnn/django,doismellburning/django,cainmatt/django,z0by/django,gohin/django,rogerhu/django,elena/django,joakim-hove/django,kisna72/django,donkirkby/django,katrid/django,marissazhou/django,syphar/django,theo-l/django,drjeep/django,ulope/django,jasonwzhy/django,andela-ooladayo/django,redhat-openstack/django,mjtamlyn/django,savoirfairelinux/django,leereilly/django-1,lwiecek/django,jdelight/django,mdj2/django,caotianwei/django,lsqtongxin/django,aleida/django,andreif/django,BMJHayward/django,huang4fstudio/django,gengue/django,denys-duchier/django,dursk/django,aleida/django,GhostThrone/django,kennethlove/django,dhruvagarwal/django,katrid/django,bliti/django-nonrel-1.5,AlexHill/django,dracos/django,marqueedev/django,alilotfi/django,jeezybrick/django,ziima/django,seocam/django,tysonclugg/django,extremewaysback/django,ytjiang/django,riklaunim/django-custom-multisite,chrishas35/django-travis-ci,SujaySKumar/django,lunafeng/django,Argon-Zhou/django,jaywreddy/django,Proggie02/TestRepo,iambibhas/django,akshatharaj/django,crazy-canux/django,oinopion/django,litchfield/django,SoftwareMaven/django,hasadna/django,jasonwzhy/django,takis/django,liu602348184/
django,georgemarshall/django,poiati/django,andela-ifageyinbo/django,alimony/django,elkingtonmcb/django,sadaf2605/django,stevenewey/django,mattseymour/django,riklaunim/django-custom-multisite,rsalmaso/django,craynot/django,mathspace/django,b-me/django,Nepherhotep/django,ccn-2m/django,extremewaysback/django,django-nonrel/django,henryfjordan/django,Balachan27/django,ar45/django,MarcJoan/django,wsmith323/django,gcd0318/django,olasitarska/django,barbuza/django,jeezybrick/django,sjlehtin/django,YYWen0o0/python-frame-django,akintoey/django,RaoUmer/django,whs/django,craynot/django,quamilek/django,JorgeCoock/django,mitar/django,ar45/django,xadahiya/django,spisneha25/django,edevil/django,krisys/django,anant-dev/django,ghedsouza/django,gengue/django,syphar/django,ivandevp/django,rmboggs/django,kangfend/django,RevelSystems/django,redhat-openstack/django,dpetzold/django,MounirMesselmeni/django,Beeblio/django,quxiaolong1504/django,TimBuckley/effective_django,eugena/django,ghickman/django,bitcity/django,hcsturix74/django,dydek/django,divio/django,xadahiya/django,gunchleoc/django,karyon/django,stevenewey/django,theo-l/django,kaedroho/django,bak1an/django,bak1an/django,Endika/django,sjlehtin/django,dydek/django,ABaldwinHunter/django-clone,Vixionar/django,ytjiang/django,piquadrat/django,jvkops/django,oberlin/django,dpetzold/django,rizumu/django,edmorley/django,katrid/django,hunter007/django,intgr/django,Nepherhotep/django,willharris/django,aidanlister/django,dudepare/django,etos/django,frePPLe/django,Mixser/django,poiati/django,Beauhurst/django,postrational/django,tomchristie/django,carljm/django,gohin/django,vincepandolfo/django,gcd0318/django,seanwestfall/django,frdb194/django,akaariai/django,davgibbs/django,riteshshrv/django,sbellem/django,schinckel/django,pquentin/django,RossBrunton/django,wweiradio/django,epandurski/django,mcardillo55/django,krisys/django,andreif/django,asser/django,shaistaansari/django,arun6582/django,devops2014/djangosite,beck/django,atul-bhouraskar/django,YYW
en0o0/python-frame-django,techdragon/django,shaistaansari/django,redhat-openstack/django,aroche/django,shacker/django,piquadrat/django,ataylor32/django,ericfc/django,oinopion/django,WSDC-NITWarangal/django,alexallah/django,zsiciarz/django,arun6582/django,runekaagaard/django-contrib-locking,oberlin/django,jn7163/django,tragiclifestories/django,mitchelljkotler/django,ericfc/django,mmardini/django,rtindru/django,ajoaoff/django,myang321/django,jpic/django,anant-dev/django,pquentin/django,programadorjc/django,GitAngel/django,follow99/django,rmboggs/django,helenst/django,darkryder/django,dfunckt/django,baylee/django,rsalmaso/django,willharris/django,mdj2/django,sam-tsai/django,mdj2/django,BlindHunter/django,errx/django,lunafeng/django,asser/django,yask123/django,pipermerriam/django,guettli/django,Endika/django,imtapps/django-imt-fork,lunafeng/django,dydek/django,follow99/django,reinout/django,takeshineshiro/django,marcelocure/django,hobarrera/django,kangfend/django,devops2014/djangosite,frePPLe/django,kutenai/django,pjdelport/django,dfdx2/django,hynekcer/django,kamyu104/django,kisna72/django,dpetzold/django,jsoref/django,ojengwa/django-1,bitcity/django,rapilabs/django,memtoko/django,auready/django,chyeh727/django,rtindru/django,dursk/django,Balachan27/django,sdcooke/django,shownomercy/django,sdcooke/django,hassanabidpk/django,petecummings/django,Korkki/django,varunnaganathan/django,asser/django,ebar0n/django,pasqualguerrero/django,tayfun/django,rockneurotiko/django,leekchan/django_test,rsvip/Django,jasonbot/django,epandurski/django,MoritzS/django,druuu/django,YangSongzhou/django,bobcyw/django,pjdelport/django,manhhomienbienthuy/django,dhruvagarwal/django,darkryder/django,dydek/django,jgeskens/django,rynomster/django,sadaf2605/django,kisna72/django,yakky/django,yask123/django,avneesh91/django,akaihola/django,donkirkby/django,double-y/django,simone/django-gb,akaariai/django,PolicyStat/django,dwightgunning/django,kamyu104/django,mcardillo55/django,bobcyw/django,shtouff/djang
o,simone/django-gb,TimYi/django,moreati/django,techdragon/django,kosz85/django,mcella/django,djbaldey/django,ryanahall/django,gunchleoc/django,denisenkom/django,yamila-moreno/django,georgemarshall/django,andyzsf/django,evansd/django,django/django,tysonclugg/django,x111ong/django,darkryder/django,abomyi/django,Adnn/django,deployed/django,adelton/django,denisenkom/django,leekchan/django_test,zhaodelong/django,mlavin/django,rogerhu/django,Anonymous-X6/django,mitar/django,HonzaKral/django,delhivery/django,Anonymous-X6/django,daniponi/django,xwolf12/django,avanov/django,ar45/django,oscaro/django,indevgr/django,areski/django,oinopion/django,rsalmaso/django,petecummings/django,shacker/django,BrotherPhil/django,krishna-pandey-git/django,evansd/django,carljm/django,alilotfi/django,ajoaoff/django,archen/django,mshafiq9/django,denis-pitul/django,ccn-2m/django,MikeAmy/django,simone/django-gb,jpic/django,gengue/django,jrrembert/django,redhat-openstack/django,ojake/django,WillGuan105/django,ABaldwinHunter/django-clone,Beeblio/django,koordinates/django,mojeto/django,alexmorozov/django,irwinlove/django,risicle/django,jnovinger/django,RaoUmer/django,salamer/django,georgemarshall/django,seocam/django,zedr/django,ericholscher/django,szopu/django,frishberg/django,dracos/django,MarcJoan/django,alrifqi/django,aspidites/django,gunchleoc/django,hackerbot/DjangoDev,jscn/django,daniponi/django,dwightgunning/django,Y3K/django,darjeeling/django,andyzsf/django,django-nonrel/django,jvkops/django,olasitarska/django,jgoclawski/django,HousekeepLtd/django,divio/django,wsmith323/django,ghedsouza/django,quxiaolong1504/django,lwiecek/django,chrisfranzen/django,henryfjordan/django,tuhangdi/django,koniiiik/django,rsvip/Django,github-account-because-they-want-it/django,mrbox/django,bobcyw/django,benjaminjkraft/django,georgemarshall/django,hkchenhongyi/django,jhoos/django,MatthewWilkes/django,scorphus/django,benspaulding/django,dfdx2/django,kosz85/django,sadaf2605/django,jejimenez/django,extremewaysback/dj
ango,dsanders11/django,ivandevp/django,joakim-hove/django,zhaodelong/django,whs/django,webgeodatavore/django,techdragon/django,ajoaoff/django,KokareIITP/django,programadorjc/django,joequery/django,archen/django,django-nonrel/django,MarcJoan/django,peterlauri/django,carljm/django,saydulk/django,MikeAmy/django,kosz85/django,simonw/django,DrMeers/django,saydulk/django,peterlauri/django,lsqtongxin/django,NullSoldier/django,timgraham/django,zhoulingjun/django,memtoko/django,aerophile/django,freakboy3742/django,hkchenhongyi/django,phalt/django,dursk/django,krishna-pandey-git/django,synasius/django,kswiat/django,matiasb/django,hellhovnd/django,sarthakmeh03/django,digimarc/django,sarthakmeh03/django,tcwicklund/django,sam-tsai/django,Beauhurst/django,wsmith323/django,pelme/django,reinout/django,lmorchard/django,irwinlove/django,varunnaganathan/django,camilonova/django,lsqtongxin/django,mshafiq9/django,avneesh91/django,caotianwei/django,litchfield/django,mmardini/django,jallohm/django,AltSchool/django,apocquet/django,alimony/django,vitan/django,manhhomienbienthuy/django,Proggie02/TestRepo,Leila20/django,jylaxp/django,payeldillip/django,WillGuan105/django,duqiao/django,andela-ooladayo/django,YangSongzhou/django,jn7163/django,Anonymous-X6/django,irwinlove/django,lmorchard/django,jenalgit/django,mojeto/django,BMJHayward/django,nielsvanoch/django,chrisfranzen/django,mathspace/django,andresgz/django,marqueedev/django,pasqualguerrero/django,knifenomad/django,ecederstrand/django,ghedsouza/django,aisipos/django,eyohansa/django,liuliwork/django,cobalys/django,dursk/django,xrmx/django,etos/django,blaze33/django,Yong-Lee/django,wetneb/django,zerc/django,adelton/django,oberlin/django,ABaldwinHunter/django-clone-classic,yewang15215/django,z0by/django,Sonicbids/django,andela-ifageyinbo/django,eugena/django,ArnossArnossi/django,BMJHayward/django,nju520/django,sarvex/django,irwinlove/django,aidanlister/django,rlugojr/django,makinacorpus/django,AndrewGrossman/django,alx-eu/django,coldmind/dja
ngo,rrrene/django,oscaro/django,nhippenmeyer/django,hasadna/django,uranusjr/django,rajsadho/django,1013553207/django,karyon/django,mammique/django,mcrowson/django,jmcarp/django,archen/django,akshatharaj/django,abomyi/django,AltSchool/django,quxiaolong1504/django,abomyi/django,sopier/django,hcsturix74/django,hnakamur/django,claudep/django,roselleebarle04/django,JavML/django,dsanders11/django,theo-l/django,shownomercy/django,himleyb85/django,pauloxnet/django,gunchleoc/django,weiawe/django,sergei-maertens/django,sopier/django,dbaxa/django,stewartpark/django,xrmx/django,shaib/django,django/django,hunter007/django,RevelSystems/django,henryfjordan/django,delinhabit/django,savoirfairelinux/django,ojengwa/django-1,hellhovnd/django,adrianholovaty/django,mrfuxi/django,follow99/django,maxsocl/django,liavkoren/djangoDev,moreati/django,peterlauri/django,zedr/django,sephii/django,ebar0n/django,ABaldwinHunter/django-clone,jenalgit/django,vsajip/django,ajaali/django,freakboy3742/django,sbellem/django,feroda/django,coldmind/django,hottwaj/django,nemesisdesign/django,weiawe/django,BlindHunter/django,marckuz/django,eyohansa/django,t0in4/django,poiati/django,DasIch/django,andela-ooladayo/django,taaviteska/django,twz915/django,ericfc/django,kevintaw/django,Y3K/django,knifenomad/django,hunter007/django,frePPLe/django,gannetson/django,webgeodatavore/django,andyzsf/django,gchp/django,stewartpark/django,tuhangdi/django,erikr/django,Endika/django,mewtaylor/django,rogerhu/django,neiudemo1/django,nemesisdesign/django,synasius/django,liavkoren/djangoDev,gitaarik/django,ryanahall/django,dbaxa/django,blaze33/django,xwolf12/django,ulope/django,koniiiik/django,django-nonrel/django-nonrel,jmcarp/django,stevenewey/django,BlindHunter/django,yceruto/django,leeon/annotated-django,areski/django,DONIKAN/django,edevil/django,adrianholovaty/django,gdi2290/django,salamer/django,lzw120/django,zhoulingjun/django,mitya57/django,maxsocl/django,ASCrookes/django,marqueedev/django,marctc/django,reinout/django,Broth
erPhil/django,liuliwork/django,jaywreddy/django,frishberg/django,ajaali/django,sgzsh269/django,techdragon/django,fafaman/django,kutenai/django,dhruvagarwal/django,zhoulingjun/django,djbaldey/django,xrmx/django,jarshwah/django,SebasSBM/django,lunafeng/django,akintoey/django,andrewsmedina/django,ticosax/django,NullSoldier/django,frankvdp/django,SoftwareMaven/django,chyeh727/django,alx-eu/django,adamchainz/django,djbaldey/django,elkingtonmcb/django,bikong2/django,harisibrahimkv/django,googleinterns/django,sarthakmeh03/django,stevenewey/django,ghickman/django,eyohansa/django,jhg/django,ironbox360/django,sarvex/django,leekchan/django_test,Nepherhotep/django,ziima/django,denis-pitul/django,dudepare/django,hkchenhongyi/django,andreif/django,KokareIITP/django,benjaminjkraft/django,hcsturix74/django,erikr/django,twz915/django,dbaxa/django,willhardy/django,kholidfu/django,Vixionar/django,mojeto/django,spisneha25/django,oscaro/django,jn7163/django,Balachan27/django,GaussDing/django,willhardy/django,zhoulingjun/django,mitya57/django,hottwaj/django,roselleebarle04/django,googleinterns/django,EmadMokhtar/Django,aspidites/django,guettli/django,treyhunner/django,eugena/django,rlugojr/django,mmardini/django,PolicyStat/django,auready/django,makinacorpus/django,shaib/django,wkschwartz/django,wweiradio/django,ataylor32/django,xadahiya/django,ryanahall/django,sadaf2605/django,aerophile/django,mjtamlyn/django,PetrDlouhy/django,rajsadho/django,maxsocl/django,Vixionar/django,sephii/django,takis/django,vmarkovtsev/django,arun6582/django,adambrenecki/django,simonw/django,elky/django,rizumu/django,ABaldwinHunter/django-clone-classic,dwightgunning/django,pipermerriam/django,eyohansa/django,vincepandolfo/django,1013553207/django,lisael/pg-django,tcwicklund/django,rynomster/django,litchfield/django,hnakamur/django,leereilly/django-1,ericfc/django,maxsocl/django,blaze33/django,tbeadle/django,felixjimenez/django,ccn-2m/django,seanwestfall/django,AlexHill/django,JorgeCoock/django,shownomercy/django
,ifduyue/django,megaumi/django,simonw/django,ar45/django,JavML/django,mmardini/django,jaywreddy/django,andela-ifageyinbo/django,yask123/django,Mixser/django,kevintaw/django,delinhabit/django,alilotfi/django,jallohm/django,davidharrigan/django,frankvdp/django,jylaxp/django,zulip/django,pauloxnet/django,mcella/django,daniponi/django,IRI-Research/django,mcrowson/django,andresgz/django,akintoey/django,andela-ifageyinbo/django,Proggie02/TestRepo,marcelocure/django,django-nonrel/django-nonrel,elena/django,sarthakmeh03/django,hottwaj/django,riteshshrv/django,bikong2/django,zerc/django,dudepare/django,edmorley/django,blueyed/django,zanderle/django,rwillmer/django,wetneb/django,hasadna/django,Balachan27/django,DrMeers/django,lwiecek/django,elijah513/django,jarshwah/django,RevelSystems/django,aisipos/django,eugena/django,aspidites/django,ryangallen/django,x111ong/django,IRI-Research/django,rajsadho/django,ericholscher/django,himleyb85/django,blindroot/django,EliotBerriot/django,akaariai/django,arun6582/django,elijah513/django,ABaldwinHunter/django-clone,Proggie02/TestRepo,benspaulding/django,shacker/django,wetneb/django,felixxm/django,zerc/django,cobalys/django,haxoza/django,EmadMokhtar/Django,chrishas35/django-travis-ci,WSDC-NITWarangal/django,Korkki/django,karyon/django,solarissmoke/django,fpy171/django,ziima/django,hybrideagle/django,drjeep/django,shaib/django,imtapps/django-imt-fork,zsiciarz/django,mojeto/django,ziima/django,leereilly/django-1,tragiclifestories/django,rynomster/django,mitchelljkotler/django,DasIch/django,MounirMesselmeni/django,GhostThrone/django,nhippenmeyer/django,jarshwah/django,felixjimenez/django,DONIKAN/django,areski/django,mcrowson/django,marckuz/django,AndrewGrossman/django,hackerbot/DjangoDev,supriyantomaftuh/django,jenalgit/django,nju520/django,salamer/django,deployed/django,bobcyw/django,jyotsna1820/django,shownomercy/django,syaiful6/django,druuu/django,poiati/django,hnakamur/django,ptoraskar/django,yewang15215/django,Matt-Deacalion/django,stew
artpark/django,kennethlove/django,SoftwareMaven/django,andela-ooladayo/django,SoftwareMaven/django,intgr/django,roselleebarle04/django,bikong2/django,mattseymour/django,robhudson/django,aerophile/django,mattrobenolt/django,nhippenmeyer/django,tanmaythakur/django,codepantry/django,Y3K/django,felixxm/django,aspidites/django,knifenomad/django,MoritzS/django,uranusjr/django,dudepare/django,DasIch/django,harisibrahimkv/django,MarkusH/django,jhoos/django,zedr/django,loic/django,jaywreddy/django,darjeeling/django,mathspace/django,unaizalakain/django,ecederstrand/django,filias/django,schinckel/django,auready/django,RossBrunton/django,lmorchard/django,apocquet/django,olasitarska/django,haxoza/django,Leila20/django,HonzaKral/django,harisibrahimkv/django,ckirby/django,jasonbot/django,SebasSBM/django,fpy171/django,oberlin/django,hunter007/django,adrianholovaty/django,dfdx2/django,gcd0318/django,dwightgunning/django,robhudson/django,django/django,marctc/django,dsanders11/django,feroda/django,Adnn/django,megaumi/django,kswiat/django,dfdx2/django,nju520/django,codepantry/django,Korkki/django,treyhunner/django,apollo13/django,donkirkby/django,TimBuckley/effective_django,Matt-Deacalion/django,django-nonrel/django,risicle/django,dfunckt/django,elky/django,apollo13/django,lzw120/django,chrishas35/django-travis-ci,TimBuckley/effective_django,PetrDlouhy/django,b-me/django,Leila20/django,rlugojr/django,kamyu104/django,dpetzold/django,cainmatt/django,jgoclawski/django,aroche/django,sarvex/django,yigitguler/django,adambrenecki/django,dracos/django,lsqtongxin/django,akintoey/django,uranusjr/django,myang321/django,ABaldwinHunter/django-clone-classic,vitan/django,nemesisdesign/django,davidharrigan/django,feroda/django,mewtaylor/django,neiudemo1/django,treyhunner/django,kennethlove/django,vsajip/django,WSDC-NITWarangal/django,yamila-moreno/django,ifduyue/django,mattseymour/django,neiudemo1/django,manhhomienbienthuy/django,EliotBerriot/django,Sonicbids/django,jscn/django,camilonova/django,xrmx/
django,gohin/django,fenginx/django,mbox/django,ivandevp/django,ataylor32/django,alexallah/django,manhhomienbienthuy/django,myang321/django,koniiiik/django,phalt/django,sgzsh269/django,kevintaw/django,yakky/django,webgeodatavore/django,baylee/django,dgladkov/django,xadahiya/django,programadorjc/django,kamyu104/django,waytai/django,syaiful6/django,BlindHunter/django,takeshineshiro/django,vitaly4uk/django,ptoraskar/django,blighj/django,evansd/django,pipermerriam/django,marctc/django,avneesh91/django,JorgeCoock/django,shtouff/django,curtisstpierre/django,jdelight/django,feroda/django,SebasSBM/django,unaizalakain/django,bitcity/django,pquentin/django,schinckel/django,IRI-Research/django,beck/django,supriyantomaftuh/django,cainmatt/django,hellhovnd/django,mbox/django,zerc/django,matiasb/django,erikr/django,kosz85/django,rockneurotiko/django,PetrDlouhy/django,gdi2290/django,moreati/django,savoirfairelinux/django,spisneha25/django,frePPLe/django,vmarkovtsev/django,elkingtonmcb/django,rrrene/django,scorphus/django,ajaali/django,Argon-Zhou/django,mrbox/django,weiawe/django,bspink/django,MounirMesselmeni/django,jenalgit/django,h4r5h1t/django-hauthy,takis/django,jylaxp/django,timgraham/django,shtouff/django,adelton/django,barbuza/django,varunnaganathan/django,PetrDlouhy/django,delhivery/django,helenst/django,joakim-hove/django,double-y/django,saydulk/django,mjtamlyn/django,knifenomad/django,zulip/django,camilonova/django,neiudemo1/django,willhardy/django,NullSoldier/django,ajoaoff/django,pelme/django,errx/django,crazy-canux/django,barbuza/django,gohin/django,frdb194/django,tanmaythakur/django,fpy171/django,curtisstpierre/django,yewang15215/django,googleinterns/django,dex4er/django,abomyi/django,blighj/django,alrifqi/django,yigitguler/django,tbeadle/django,jasonbot/django,Argon-Zhou/django,haxoza/django,jejimenez/django,gitaarik/django,tomchristie/django,vitan/django,akshatharaj/django,loic/django,alexmorozov/django,dracos/django,raphaelmerx/django,gchp/django,monetate/django,ro
ckneurotiko/django,digimarc/django,riklaunim/django-custom-multisite,tomchristie/django,hynekcer/django,apollo13/django,hobarrera/django,scorphus/django,joequery/django,jhoos/django,riteshshrv/django,denis-pitul/django,ulope/django,pelme/django,NullSoldier/django,nielsvanoch/django,mbox/django,elijah513/django,blueyed/django,hybrideagle/django,jasonbot/django,ghedsouza/django,avneesh91/django,elena/django,mcardillo55/django,EmadMokhtar/Django,leeon/annotated-django,Mixser/django,vincepandolfo/django,gdi2290/django,robhudson/django,dbaxa/django,katrid/django,aidanlister/django,huang4fstudio/django,denys-duchier/django,evansd/django,wetneb/django,darkryder/django,piquadrat/django,deployed/django,MoritzS/django,Mixser/django,t0in4/django,JorgeCoock/django,YangSongzhou/django,rapilabs/django,dhruvagarwal/django,anant-dev/django,jallohm/django,adamchainz/django,mlavin/django,guettli/django,waytai/django,WSDC-NITWarangal/django,takeshineshiro/django,rapilabs/django,elena/django,marissazhou/django,indevgr/django,tcwicklund/django,dex4er/django,jrrembert/django,lmorchard/django,donkirkby/django,sdcooke/django,ryangallen/django,kcpawan/django,SujaySKumar/django,ojake/django,RossBrunton/django,rapilabs/django,marissazhou/django,mcrowson/django,hynekcer/django,jsoref/django,ryangallen/django,yamila-moreno/django,jhg/django,felixjimenez/django,kholidfu/django,mattrobenolt/django,bikong2/django,quamilek/django,Argon-Zhou/django,krisys/django,tanmaythakur/django,adelton/django,mitya57/django,saydulk/django,h4r5h1t/django-hauthy,HousekeepLtd/django,github-account-because-they-want-it/django,DONIKAN/django,kutenai/django,bspink/django,jylaxp/django,Endika/django,treyhunner/django,Beauhurst/django,mcella/django,mttr/django,auready/django,charettes/django,jyotsna1820/django,spisneha25/django,blighj/django,indevgr/django,Yong-Lee/django,stewartpark/django,jn7163/django,MikeAmy/django,krisys/django,claudep/django,pjdelport/django,jgoclawski/django,zulip/django,shtouff/django,AndrewGros
sman/django,cainmatt/django,tbeadle/django,marcelocure/django,hcsturix74/django,kevintaw/django,davgibbs/django,yakky/django,charettes/django,charettes/django,KokareIITP/django,synasius/django,pipermerriam/django,auvipy/django,dgladkov/django,vitaly4uk/django,atul-bhouraskar/django,hassanabidpk/django,beck/django,delinhabit/django,shacker/django,Beeblio/django,gdub/django,darjeeling/django,frdb194/django,gdub/django,davgibbs/django,hassanabidpk/django,nemesisdesign/django,taaviteska/django,rrrene/django,ryangallen/django,timgraham/django,daniponi/django,GitAngel/django,gannetson/django,solarissmoke/django,kcpawan/django,mattrobenolt/django,andresgz/django,denisenkom/django,lzw120/django,xwolf12/django,mattseymour/django,vsajip/django,ticosax/django,rhertzog/django,ryanahall/django,DrMeers/django,sdcooke/django,supriyantomaftuh/django,jasonwzhy/django,jallohm/django,sergei-maertens/django,adambrenecki/django,t0in4/django,coldmind/django,davidharrigan/django,yograterol/django,frishberg/django,marqueedev/django,wsmith323/django,beckastar/django,megaumi/django,1013553207/django,jvkops/django,fenginx/django,joequery/django,ArnossArnossi/django,litchfield/django,django/django,avanov/django,rlugojr/django,nhippenmeyer/django,kutenai/django,jhg/django,t0in4/django,duqiao/django,xwolf12/django,henryfjordan/django,marctc/django,elky/django,frankvdp/django,DasIch/django,rizumu/django,epandurski/django,RaoUmer/django,karyon/django,gdub/django,ccn-2m/django,crazy-canux/django,sjlehtin/django,alexallah/django,jmcarp/django,risicle/django,quamilek/django,duqiao/django,BMJHayward/django,jnovinger/django,ghickman/django,devops2014/djangosite,mshafiq9/django,tragiclifestories/django,mathspace/django,MarkusH/django,HonzaKral/django,denis-pitul/django,delinhabit/django,wkschwartz/django,chyeh727/django,syaiful6/django,areski/django,wweiradio/django,Anonymous-X6/django,Yong-Lee/django,rsvip/Django,codepantry/django,felixxm/django,frishberg/django,akaihola/django,GhostThrone/django,ptora
skar/django,koordinates/django,TridevGuha/django,yask123/django,WillGuan105/django,HousekeepLtd/django,djbaldey/django,jdelight/django,monetate/django,rynomster/django,quxiaolong1504/django,bak1an/django,etos/django,jscn/django,marckuz/django,ecederstrand/django,solarissmoke/django,gannetson/django,tayfun/django,pasqualguerrero/django,andreif/django,rtindru/django,MatthewWilkes/django,mammique/django,helenst/django,rmboggs/django,carljm/django,jgeskens/django,sopier/django,TimYi/django,JavML/django,mrbox/django,sergei-maertens/django,sgzsh269/django,x111ong/django,atul-bhouraskar/django,koniiiik/django,tomchristie/django,seocam/django,gengue/django,jarshwah/django,szopu/django,rmboggs/django,zhaodelong/django,seocam/django,aroche/django,mcella/django,hellhovnd/django,hackerbot/DjangoDev,szopu/django,myang321/django,Vixionar/django,mitchelljkotler/django,duqiao/django,tbeadle/django,aerophile/django,GaussDing/django,ASCrookes/django,jrrembert/django,reinout/django,mrfuxi/django,synasius/django,RaoUmer/django,SebasSBM/django,benjaminjkraft/django,mewtaylor/django,felixjimenez/django,indevgr/django,dsanders11/django,MikeAmy/django,apollo13/django,liu602348184/django,mewtaylor/django,adamchainz/django,z0by/django,iambibhas/django,1013553207/django,JavML/django,jgeskens/django,atul-bhouraskar/django,jnovinger/django,mattrobenolt/django,marckuz/django,akshatharaj/django,nealtodd/django,wkschwartz/django,beckastar/django,liavkoren/djangoDev,riteshshrv/django,Beauhurst/django,rwillmer/django,alexallah/django,Adnn/django,googleinterns/django,denys-duchier/django,ArnossArnossi/django,andrewsmedina/django,peterlauri/django,rsalmaso/django,auvipy/django,waytai/django,jscn/django,camilonova/django,TimYi/django,zsiciarz/django,ojake/django,dgladkov/django,doismellburning/django,yceruto/django,varunnaganathan/django,hobarrera/django,willharris/django,petecummings/django,elkingtonmcb/django,oinopion/django,raphaelmerx/django,ataylor32/django,GaussDing/django,blindroot/django,hobarr
era/django,tayfun/django,sbellem/django,GitAngel/django,fpy171/django,pasqualguerrero/django,ABaldwinHunter/django-clone-classic,extremewaysback/django,digimarc/django,shaistaansari/django,Matt-Deacalion/django,baylee/django,akaariai/django,ckirby/django,coldmind/django,fafaman/django,jmcarp/django,darjeeling/django,GhostThrone/django,double-y/django,wkschwartz/django,ArnossArnossi/django,beck/django,hybrideagle/django,rhertzog/django,matiasb/django,hackerbot/DjangoDev,monetate/django,MarcJoan/django,craynot/django,makinacorpus/django,b-me/django,edmorley/django,crazy-canux/django,MatthewWilkes/django,YangSongzhou/django,BrotherPhil/django,yograterol/django,kswiat/django,jasonwzhy/django,fafaman/django,gannetson/django,labcodes/django,ojengwa/django-1,taaviteska/django,zhaodelong/django,vitaly4uk/django,mammique/django,liu602348184/django,bliti/django-nonrel-1.5,Korkki/django,dfunckt/django,phalt/django,alexmorozov/django,mitya57/django,edevil/django,alrifqi/django,fafaman/django,ojengwa/django-1,z0by/django,frdb194/django,ptoraskar/django,gchp/django,gitaarik/django,barbuza/django,postrational/django,YYWen0o0/python-frame-django,yewang15215/django,ojake/django,fenginx/django,github-account-because-they-want-it/django,ticosax/django,epandurski/django,chrisfranzen/django,freakboy3742/django,ericholscher/django,aroche/django,kaedroho/django,hkchenhongyi/django,takis/django,ajaali/django,ebar0n/django,aisipos/django,hybrideagle/django,mitar/django,Nepherhotep/django,ironbox360/django,hynekcer/django,mrfuxi/django,fenginx/django,TridevGuha/django,alexmorozov/django,huang4fstudio/django,ytjiang/django,beni55/django,ironbox360/django,pauloxnet/django,edmorley/django,denys-duchier/django,rajsadho/django,TimYi/django,WillGuan105/django,davidharrigan/django,RevelSystems/django,mjtamlyn/django,druuu/django,theo-l/django,andresgz/django,jnovinger/django,rockneurotiko/django,krishna-pandey-git/django,supriyantomaftuh/django,MarkusH/django,rizumu/django,curtisstpierre/django,jpi
c/django,bliti/django-nonrel-1.5,aidanlister/django,labcodes/django,ecederstrand/django,nealtodd/django,mlavin/django,aleida/django,zsiciarz/django,himleyb85/django,rtindru/django,blindroot/django,beckastar/django,beni55/django,elijah513/django,HousekeepLtd/django,rwillmer/django,hottwaj/django,mshafiq9/django,tcwicklund/django,haxoza/django,jsoref/django,liu602348184/django,h4r5h1t/django-hauthy,errx/django,loic/django,alx-eu/django,waytai/django,bak1an/django,postrational/django,unaizalakain/django,blueyed/django,vmarkovtsev/django,shaib/django,alrifqi/django,caotianwei/django,druuu/django,dgladkov/django,yceruto/django,TridevGuha/django,drjeep/django,huang4fstudio/django,rrrene/django,jgoclawski/django,MatthewWilkes/django,jhg/django,syphar/django,savoirfairelinux/django,chrisfranzen/django,double-y/django,kaedroho/django,oscaro/django,pauloxnet/django,AltSchool/django,charettes/django,lisael/pg-django,BrotherPhil/django,vincepandolfo/django,timgraham/django,megaumi/django,guettli/django,yakky/django,yigitguler/django,ebar0n/django,github-account-because-they-want-it/django,codepantry/django,labcodes/django,risicle/django,sopier/django,benjaminjkraft/django,AltSchool/django,mttr/django,b-me/django,GitAngel/django,lwiecek/django,vitaly4uk/django,solarissmoke/django,PolicyStat/django,seanwestfall/django,mrfuxi/django,adamchainz/django,EliotBerriot/django,dfunckt/django,mrbox/django,wweiradio/django,DONIKAN/django,yograterol/django,bitcity/django,ghickman/django,ASCrookes/django,robhudson/django,sarvex/django,jejimenez/django,Matt-Deacalion/django,doismellburning/django,anant-dev/django,sgzsh269/django,shaistaansari/django,ironbox360/django,himleyb85/django,zanderle/django,willhardy/django,ifduyue/django,rhertzog/django,etos/django,petecummings/django,sam-tsai/django,benspaulding/django,jdelight/django,seanwestfall/django,jvkops/django,koordinates/django,intgr/django,EliotBerriot/django,AlexHill/django,kcpawan/django,tragiclifestories/django,KokareIITP/django,syaifu
l6/django,sjlehtin/django,alx-eu/django,vitan/django,ticosax/django,apocquet/django,piquadrat/django,salamer/django,webgeodatavore/django,sbellem/django,mitchelljkotler/django,bspink/django,programadorjc/django,baylee/django,avanov/django,kholidfu/django,gchp/django,kisna72/django,AndrewGrossman/django,craynot/django,imtapps/django-imt-fork,delhivery/django,bspink/django,labcodes/django,MoritzS/django,quamilek/django,gitaarik/django,jpic/django,gdub/django,vmarkovtsev/django,divio/django,follow99/django,intgr/django,rwillmer/django,rhertzog/django,monetate/django,avanov/django,willharris/django,sephii/django,tuhangdi/django,lisael/pg-django,beckastar/django,tuhangdi/django,zanderle/django,tysonclugg/django,filias/django,felixxm/django,Yong-Lee/django,ivandevp/django,takeshineshiro/django,filias/django,kholidfu/django,blueyed/django,digimarc/django,ytjiang/django,phalt/django,x111ong/django,RossBrunton/django,jrrembert/django,tanmaythakur/django,unaizalakain/django,hassanabidpk/django,weiawe/django,Beeblio/django,chyeh727/django,elky/django,ifduyue/django,moreati/django,drjeep/django,MounirMesselmeni/django,iambibhas/django,payeldillip/django,alilotfi/django,nielsvanoch/django,runekaagaard/django-contrib-locking,HonzaKral/django,loic/django,aisipos/django,twz915/django,beni55/django,jeezybrick/django,kangfend/django,roselleebarle04/django,jyotsna1820/django,dex4er/django,kcpawan/django,harisibrahimkv/django,claudep/django,delhivery/django,nealtodd/django,filias/django,runekaagaard/django-contrib-locking,MarkusH/django,simonw/django,SujaySKumar/django,claudep/django,kangfend/django,twz915/django,zanderle/django,apocquet/django,syphar/django,uranusjr/django,frankvdp/django,tysonclugg/django,joequery/django,davgibbs/django,raphaelmerx/django,jyotsna1820/django,Sonicbids/django,payeldillip/django,tayfun/django,mcardillo55/django,blighj/django,jeezybrick/django,sam-tsai/django,mlavin/django,erikr/django,sergei-maertens/django,curtisstpierre/django,caotianwei/django,liuliw
ork/django | ---
+++
@@ -28,7 +28,7 @@
is_removed = False,
)
if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None):
- where = ['user_id NOT IN (SELECT user_id FROM auth_users_group WHERE group_id = %s)']
+ where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)']
params = [settings.COMMENTS_BANNED_USERS_GROUP]
qs = qs.extra(where=where, params=params)
return qs.order_by('-submit_date')[:40] |
0fe9fdce0ed5f0200450fd7e0e2bc9842ea4375d | settings.py | settings.py | from datetime import datetime, timedelta
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
DATE_FORMAT = '%Y-%m-%d %H:%M:%S GMT'
schema = {
'name': {
'type': 'string',
'minlength': 3,
'maxlength': 50,
'required': True,
},
'occurred_on': {
'type': 'datetime',
'default': datetime.now(),
},
'source': {
'type': 'string',
'minlength': 2,
'maxlength': 50,
'required': True,
},
'reporter': {
'type': 'string',
'minlength': 3,
'maxlength': 20,
'required': True,
},
'details': {
'type': 'string',
'minlength': 0,
'maxlength': 300,
'required': False
},
}
event = {
'item_title': 'event',
'additional_lookup': {
'url': 'regex("[\w]+")',
'field': 'name',
},
'cache_control': 'max-age=10, must-revalidate',
'cache_expires': 10,
'resource_methods': ['GET', 'POST'],
'schema': schema
}
DOMAIN = {
'event': event,
}
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_USERNAME = ''
MONGO_PASSWORD = ''
MONGO_DBNAME = 'historia'
| from datetime import datetime, timedelta
RESOURCE_METHODS = ['GET', 'POST', 'DELETE']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
DATE_FORMAT = '%Y-%m-%d %H:%M:%S GMT'
schema = {
'name': {
'type': 'string',
'minlength': 3,
'maxlength': 50,
'required': True,
},
'occurred_on': {
'type': 'datetime',
'default': datetime.utcnow(),
},
'source': {
'type': 'string',
'minlength': 2,
'maxlength': 50,
'required': True,
},
'reporter': {
'type': 'string',
'minlength': 3,
'maxlength': 20,
'required': True,
},
'details': {
'type': 'string',
'minlength': 0,
'maxlength': 300,
'required': False
},
}
event = {
'item_title': 'event',
'additional_lookup': {
'url': 'regex("[\w]+")',
'field': 'name',
},
'cache_control': 'max-age=10, must-revalidate',
'cache_expires': 10,
'resource_methods': ['GET', 'POST'],
'schema': schema
}
DOMAIN = {
'event': event,
}
MONGO_HOST = 'localhost'
MONGO_PORT = 27017
MONGO_USERNAME = ''
MONGO_PASSWORD = ''
MONGO_DBNAME = 'historia'
| Change default value to use UTC time | Change default value to use UTC time
| Python | mit | waoliveros/historia | ---
+++
@@ -13,7 +13,7 @@
},
'occurred_on': {
'type': 'datetime',
- 'default': datetime.now(),
+ 'default': datetime.utcnow(),
},
'source': {
'type': 'string', |
80e84d2970114bcf7362c7d280dc12131e55871a | main.py | main.py | from multiprocessing import freeze_support
from ELiDE.app import ELiDEApp
import sys
import os
wd = os.getcwd()
sys.path.extend([wd + '/LiSE', wd + '/ELiDE', wd + '/allegedb'])
def get_application_config(*args):
return wd + '/ELiDE.ini'
if __name__ == '__main__':
freeze_support()
app = ELiDEApp()
app.get_application_config = get_application_config
app.run()
| from multiprocessing import freeze_support
import sys
import os
wd = os.getcwd()
sys.path.extend([wd + '/LiSE', wd + '/ELiDE', wd + '/allegedb'])
def get_application_config(*args):
return wd + '/ELiDE.ini'
if __name__ == '__main__':
freeze_support()
from ELiDE.app import ELiDEApp
app = ELiDEApp()
app.get_application_config = get_application_config
app.run()
| Fix extra blank window on Windows via PyInstaller | Fix extra blank window on Windows via PyInstaller
| Python | agpl-3.0 | LogicalDash/LiSE,LogicalDash/LiSE | ---
+++
@@ -1,5 +1,4 @@
from multiprocessing import freeze_support
-from ELiDE.app import ELiDEApp
import sys
import os
wd = os.getcwd()
@@ -13,6 +12,7 @@
if __name__ == '__main__':
freeze_support()
+ from ELiDE.app import ELiDEApp
app = ELiDEApp()
app.get_application_config = get_application_config
app.run() |
bb522a33096d7db252c02fda02e6419548094813 | runtests.py | runtests.py | #!/usr/bin/env python
# Adapted from https://raw.githubusercontent.com/hzy/django-polarize/master/runtests.py
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'tests',
),
MIDDLEWARE_CLASSES=(
),
ROOT_URLCONF='tests.urls',
)
def runtests():
argv = sys.argv[:1] + ['test', 'tests']
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
| #!/usr/bin/env python
# Adapted from https://raw.githubusercontent.com/hzy/django-polarize/master/runtests.py
import sys
from django.conf import settings
from django.core.management import execute_from_command_line
import django
if django.VERSION < (1, 6):
extra_settings = {
'TEST_RUNNER': 'discover_runner.DiscoverRunner',
}
else:
extra_settings = {}
if not settings.configured:
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
INSTALLED_APPS=(
'tests',
),
MIDDLEWARE_CLASSES=(
),
ROOT_URLCONF='tests.urls',
**extra_settings,
)
def runtests():
argv = sys.argv[:1] + ['test', 'tests']
execute_from_command_line(argv)
if __name__ == '__main__':
runtests()
| Use discover runner on older djangos | Use discover runner on older djangos
| Python | bsd-3-clause | limbera/django-nap,MarkusH/django-nap | ---
+++
@@ -6,6 +6,15 @@
from django.conf import settings
from django.core.management import execute_from_command_line
+
+import django
+
+if django.VERSION < (1, 6):
+ extra_settings = {
+ 'TEST_RUNNER': 'discover_runner.DiscoverRunner',
+ }
+else:
+ extra_settings = {}
if not settings.configured:
@@ -21,6 +30,7 @@
MIDDLEWARE_CLASSES=(
),
ROOT_URLCONF='tests.urls',
+ **extra_settings,
)
|
f155668bb622e442daffef86a2efddc89b48239a | climate_data/migrations/0026_auto_20170627_1914.py | climate_data/migrations/0026_auto_20170627_1914.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-27 19:14
from __future__ import unicode_literals
import django.contrib.postgres.fields.ranges
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0025_auto_20170627_1850'),
]
operations = [
migrations.AddField(
model_name='datatype',
name='bounds_2',
field=django.contrib.postgres.fields.ranges.FloatRangeField(default='[-2147483648,2147483647)'),
),
migrations.AlterField(
model_name='datatype',
name='bounds',
field=django.contrib.postgres.fields.ranges.IntegerRangeField(default='[-2147483648,2147483647]'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-27 19:14
from __future__ import unicode_literals
import django.contrib.postgres.fields.ranges
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('climate_data', '0025_auto_20170627_1850'),
]
operations = [
migrations.AddField(
model_name='datatype',
name='bounds_2',
field=django.contrib.postgres.fields.ranges.FloatRangeField(default='[-2147483648,2147483647)'),
),
migrations.AlterField(
model_name='datatype',
name='bounds',
field=django.contrib.postgres.fields.ranges.IntegerRangeField(default='[-2147483648,2147483646]'),
),
]
| Fix an integer out of range error with a migration to the transient integer bounds field (superseded by a float bounds field). | Fix an integer out of range error with a migration to the transient integer bounds field (superseded by a float bounds field).
| Python | apache-2.0 | qubs/data-centre,qubs/climate-data-api,qubs/climate-data-api,qubs/data-centre | ---
+++
@@ -21,6 +21,6 @@
migrations.AlterField(
model_name='datatype',
name='bounds',
- field=django.contrib.postgres.fields.ranges.IntegerRangeField(default='[-2147483648,2147483647]'),
+ field=django.contrib.postgres.fields.ranges.IntegerRangeField(default='[-2147483648,2147483646]'),
),
] |
cd14ff6463d12c1e97be014766094ef90945e783 | django_extensions/management/technical_response.py | django_extensions/management/technical_response.py | # -*- coding: utf-8 -*-
import six
from django.core.handlers.wsgi import WSGIHandler
wsgi_tb = None
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response wrapper is called on each 'Middleware' object to avoid
leaking exceptions. The wrapper eventually calls technical_500_response to create a response for
an error view.
Runserver_plus overrides the django debug view's technical_500_response function with this
to allow for an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response on each object in the stack of Middleware objects, re-raising an error
quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only raise the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
global wsgi_tb
# After an uncaught exception is raised the class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
wsgi_tb = tb
six.reraise(exc_type, exc_value, tb)
else:
six.reraise(exc_type, exc_value, wsgi_tb)
| # -*- coding: utf-8 -*-
import threading
import six
from django.core.handlers.wsgi import WSGIHandler
tld = threading.local()
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response() wrapper is called on each 'Middleware' object to avoid
leaking exceptions. If and uncaught exception is raised, the wrapper calls technical_500_response()
to create a response for django's debug view.
Runserver_plus overrides the django debug view's technical_500_response() function to allow for
an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response() on each object in the stack of Middleware objects, re-raising an
error quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only store the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
# Store the most recent tb for WSGI requests. The class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
tld.wsgi_tb = tb
elif tld.wsgi_tb:
tb = tld.wsgi_tb
six.reraise(exc_type, exc_value, tb)
| Replace global var with threading.local | Replace global var with threading.local
| Python | mit | django-extensions/django-extensions,django-extensions/django-extensions,django-extensions/django-extensions | ---
+++
@@ -1,31 +1,34 @@
# -*- coding: utf-8 -*-
+import threading
+
import six
from django.core.handlers.wsgi import WSGIHandler
-wsgi_tb = None
+tld = threading.local()
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
- Django's convert_exception_to_response wrapper is called on each 'Middleware' object to avoid
- leaking exceptions. The wrapper eventually calls technical_500_response to create a response for
- an error view.
+ Django's convert_exception_to_response() wrapper is called on each 'Middleware' object to avoid
+ leaking exceptions. If and uncaught exception is raised, the wrapper calls technical_500_response()
+ to create a response for django's debug view.
- Runserver_plus overrides the django debug view's technical_500_response function with this
- to allow for an enhanced WSGI debugger view to be displayed. However, because Django calls
- convert_exception_to_response on each object in the stack of Middleware objects, re-raising an error
- quickly pollutes the traceback displayed.
+ Runserver_plus overrides the django debug view's technical_500_response() function to allow for
+ an enhanced WSGI debugger view to be displayed. However, because Django calls
+ convert_exception_to_response() on each object in the stack of Middleware objects, re-raising an
+ error quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
- only raise the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
+ only store the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
- global wsgi_tb
- # After an uncaught exception is raised the class can be found in the second frame of the tb
+ # Store the most recent tb for WSGI requests. The class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
- wsgi_tb = tb
- six.reraise(exc_type, exc_value, tb)
- else:
- six.reraise(exc_type, exc_value, wsgi_tb)
+ tld.wsgi_tb = tb
+
+ elif tld.wsgi_tb:
+ tb = tld.wsgi_tb
+
+ six.reraise(exc_type, exc_value, tb) |
4ea6a11341c2bbd978d5e0e416c398a442158da6 | whip/web.py | whip/web.py | """
Whip's REST API
"""
# pylint: disable=missing-docstring
from socket import inet_aton
from flask import Flask, abort, make_response, request
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db # pylint: disable=global-statement
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
try:
key = inet_aton(ip)
except OSError:
abort(400)
datetime = request.args.get('datetime')
info_as_json = db.lookup(key, datetime)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
| """
Whip's REST API
"""
# pylint: disable=missing-docstring
from flask import Flask, make_response, request
from .db import Database
app = Flask(__name__)
app.config.from_envvar('WHIP_SETTINGS', silent=True)
db = None
@app.before_first_request
def _open_db():
global db # pylint: disable=global-statement
db = Database(app.config['DATABASE_DIR'])
@app.route('/ip/<ip>')
def lookup(ip):
datetime = request.args.get('datetime')
info_as_json = db.lookup(ip, datetime)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded
response = make_response(info_as_json)
response.headers['Content-type'] = 'application/json'
return response
| Handle IPv6 in REST API | Handle IPv6 in REST API
| Python | bsd-3-clause | wbolster/whip | ---
+++
@@ -4,9 +4,7 @@
# pylint: disable=missing-docstring
-from socket import inet_aton
-
-from flask import Flask, abort, make_response, request
+from flask import Flask, make_response, request
from .db import Database
@@ -25,13 +23,8 @@
@app.route('/ip/<ip>')
def lookup(ip):
- try:
- key = inet_aton(ip)
- except OSError:
- abort(400)
-
datetime = request.args.get('datetime')
- info_as_json = db.lookup(key, datetime)
+ info_as_json = db.lookup(ip, datetime)
if info_as_json is None:
info_as_json = b'{}' # empty dict, JSON-encoded |
43004cfd537c801475bf7e3b3c80dee4da18712f | backend/hook_manager.py | backend/hook_manager.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2014 Université Catholique de Louvain.
#
# This file is part of INGInious.
#
# INGInious is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INGInious is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with INGInious. If not, see <http://www.gnu.org/licenses/>.
""" Hook Manager """
class HookManager(object):
""" Registers an manages hooks. Hooks are callback functions called when the backend does a specific action. """
def __init__(self):
self.hooks = {}
def add_hook(self, name, callback):
""" Add a new hook that can be called with the call_hook function """
hook_list = self.hooks.get(name, [])
hook_list.append(callback)
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
""" Call all hooks registered with this name """
for func in self.hooks.get(name, []):
func(**kwargs)
| # -*- coding: utf-8 -*-
#
# Copyright (c) 2014 Université Catholique de Louvain.
#
# This file is part of INGInious.
#
# INGInious is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INGInious is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with INGInious. If not, see <http://www.gnu.org/licenses/>.
""" Hook Manager """
class HookManager(object):
""" Registers an manages hooks. Hooks are callback functions called when the backend does a specific action. """
def __init__(self):
self.hooks = {}
def add_hook(self, name, callback):
""" Add a new hook that can be called with the call_hook function """
hook_list = self.hooks.get(name, [])
hook_list.append(callback)
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
""" Call all hooks registered with this name. Returns a list of the returns values of the hooks (in the order the hooks were added)"""
return map(lambda x: x(**kwargs), self.hooks.get(name, []))
| Allow hooks to return values (and simplify the code) | Allow hooks to return values (and simplify the code)
| Python | agpl-3.0 | layus/INGInious,layus/INGInious,layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious,layus/INGInious,GuillaumeDerval/INGInious,GuillaumeDerval/INGInious | ---
+++
@@ -33,6 +33,5 @@
self.hooks[name] = hook_list
def call_hook(self, name, **kwargs):
- """ Call all hooks registered with this name """
- for func in self.hooks.get(name, []):
- func(**kwargs)
+ """ Call all hooks registered with this name. Returns a list of the returns values of the hooks (in the order the hooks were added)"""
+ return map(lambda x: x(**kwargs), self.hooks.get(name, [])) |
bb77b9554108c6a9739dd058a12484d15f10d3a2 | candidates/views/helpers.py | candidates/views/helpers.py | from django.core.urlresolvers import reverse
from django.conf import settings
from django.http import HttpResponseRedirect
from slugify import slugify
from ..election_specific import AREA_POST_DATA
from ..models import (
PopItPerson, membership_covers_date
)
def get_redirect_to_post(election, post_data):
short_post_label = AREA_POST_DATA.shorten_post_label(
election, post_data['label']
)
return HttpResponseRedirect(
reverse(
'constituency',
kwargs={
'election': election,
'post_id': post_data['id'],
'ignored_slug': slugify(short_post_label),
}
)
)
def get_people_from_memberships(election_data, memberships):
current_candidates = set()
past_candidates = set()
for membership in memberships:
if not membership.get('role') == election_data['candidate_membership_role']:
continue
person = PopItPerson.create_from_dict(membership['person_id'])
if membership_covers_date(
membership,
election_data['election_date']
):
current_candidates.add(person)
else:
for election, election_data in settings.ELECTIONS_BY_DATE:
if not election_data.get('use_for_candidate_suggestions'):
continue
if membership_covers_date(
membership,
election_data['election_date'],
):
past_candidates.add(person)
return current_candidates, past_candidates
| from django.core.urlresolvers import reverse
from django.conf import settings
from django.http import HttpResponseRedirect
from slugify import slugify
from ..election_specific import AREA_POST_DATA
from ..models import (
PopItPerson, membership_covers_date
)
def get_redirect_to_post(election, post_data):
short_post_label = AREA_POST_DATA.shorten_post_label(
election, post_data['label']
)
return HttpResponseRedirect(
reverse(
'constituency',
kwargs={
'election': election,
'post_id': post_data['id'],
'ignored_slug': slugify(short_post_label),
}
)
)
def get_people_from_memberships(election_data, memberships):
current_candidates = set()
past_candidates = set()
for membership in memberships:
if not membership.get('role') == election_data['candidate_membership_role']:
continue
person = PopItPerson.create_from_dict(membership['person_id'])
if membership_covers_date(
membership,
election_data['election_date']
):
current_candidates.add(person)
else:
for other_election, other_election_data in settings.ELECTIONS_BY_DATE:
if not other_election_data.get('use_for_candidate_suggestions'):
continue
if membership_covers_date(
membership,
other_election_data['election_date'],
):
past_candidates.add(person)
return current_candidates, past_candidates
| Fix a stupid bug in get_people_from_memberships | Fix a stupid bug in get_people_from_memberships
The rebinding of the function's election_data parameter was breaking
the listing of candidates for a post.
| Python | agpl-3.0 | openstate/yournextrepresentative,mysociety/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,datamade/yournextmp-popit,openstate/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,datamade/yournextmp-popit,openstate/yournextrepresentative,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,DemocracyClub/yournextrepresentative | ---
+++
@@ -37,12 +37,12 @@
):
current_candidates.add(person)
else:
- for election, election_data in settings.ELECTIONS_BY_DATE:
- if not election_data.get('use_for_candidate_suggestions'):
+ for other_election, other_election_data in settings.ELECTIONS_BY_DATE:
+ if not other_election_data.get('use_for_candidate_suggestions'):
continue
if membership_covers_date(
membership,
- election_data['election_date'],
+ other_election_data['election_date'],
):
past_candidates.add(person)
|
b2268ae4ecad477c46a4b99ec17511e2e535b9d0 | globus_cli/commands/task/generate_submission_id.py | globus_cli/commands/task/generate_submission_id.py | import click
from globus_cli.parsing import common_options
from globus_cli.safeio import FORMAT_TEXT_RAW, formatted_print
from globus_cli.services.transfer import get_client
@click.command(
"generate-submission-id",
short_help="Get a submission ID",
help=(
"Generate a new task submission ID for use in "
"`globus transfer` and `gloubs delete`. Submission IDs "
"allow you to safely retry submission of a task in the "
"presence of network errors. No matter how many times "
"you submit a task with a given ID, it will only be "
"accepted and executed once. The response status may "
"change between submissions."
),
)
@common_options
def generate_submission_id():
"""
Executor for `globus task generate-submission-id`
"""
client = get_client()
res = client.get_submission_id()
formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="value")
| import click
from globus_cli.parsing import common_options
from globus_cli.safeio import FORMAT_TEXT_RAW, formatted_print
from globus_cli.services.transfer import get_client
@click.command(
"generate-submission-id",
short_help="Get a submission ID",
help=(
"""\
Generate a new task submission ID for use in `globus transfer` and `gloubs delete`.
Submission IDs allow you to safely retry submission of a task in the presence of
network errors. No matter how many times you submit a task with a given ID, it will
only be accepted and executed once. The response status may change between
submissions.
\b
Important Note: Submission IDs are not the same as Task IDs.
"""
),
)
@common_options
def generate_submission_id():
"""
Executor for `globus task generate-submission-id`
"""
client = get_client()
res = client.get_submission_id()
formatted_print(res, text_format=FORMAT_TEXT_RAW, response_key="value")
| Clarify that submission ID != task ID | Clarify that submission ID != task ID
Calling this out in the helptext will hopefully help avoid people
conflating these two quite as easily. (An imperfect solution for an
imperfect world.)
| Python | apache-2.0 | globus/globus-cli,globus/globus-cli | ---
+++
@@ -9,13 +9,16 @@
"generate-submission-id",
short_help="Get a submission ID",
help=(
- "Generate a new task submission ID for use in "
- "`globus transfer` and `gloubs delete`. Submission IDs "
- "allow you to safely retry submission of a task in the "
- "presence of network errors. No matter how many times "
- "you submit a task with a given ID, it will only be "
- "accepted and executed once. The response status may "
- "change between submissions."
+ """\
+ Generate a new task submission ID for use in `globus transfer` and `gloubs delete`.
+ Submission IDs allow you to safely retry submission of a task in the presence of
+ network errors. No matter how many times you submit a task with a given ID, it will
+ only be accepted and executed once. The response status may change between
+ submissions.
+
+ \b
+ Important Note: Submission IDs are not the same as Task IDs.
+ """
),
)
@common_options |
2cb8db4ba908fe0e28dc95287d57fcb013d3a6cd | pmg/tasks.py | pmg/tasks.py | import logging
import newrelic.agent
log = logging.getLogger(__name__)
def send_saved_search_alerts():
from pmg import app
from pmg.models import SavedSearch
application = newrelic.agent.application()
with newrelic.agent.BackgroundTask(application, name='send_saved_search_alerts', group='Task'):
with app.app_context():
SavedSearch.send_all_alerts()
def sync_soundcloud():
from pmg import app
from pmg.models.soundcloud_track import SoundcloudTrack
application = newrelic.agent.application()
with newrelic.agent.BackgroundTask(application, name='sync_soundcloud', group='Task'):
with app.app_context():
SoundcloudTrack.sync()
def schedule():
from pmg import scheduler
# Schedule background task for sending saved search alerts every
# day at 3am (UTC)
jobs = [
scheduler.add_job('pmg.tasks:send_saved_search_alerts', 'cron',
id='send-saved-search-alerts', replace_existing=True,
coalesce=True, hour=3),
scheduler.add_job(sync_soundcloud, 'cron',
id='sync-soundcloud', replace_existing=True,
coalesce=True, minute='*/1'),
]
for job in jobs:
log.info("Scheduled task: %s" % job)
| import logging
import newrelic.agent
log = logging.getLogger(__name__)
def send_saved_search_alerts():
from pmg import app
from pmg.models import SavedSearch
application = newrelic.agent.application()
with newrelic.agent.BackgroundTask(application, name='send_saved_search_alerts', group='Task'):
with app.app_context():
SavedSearch.send_all_alerts()
def sync_soundcloud():
from pmg import app
from pmg.models.soundcloud_track import SoundcloudTrack
application = newrelic.agent.application()
with newrelic.agent.BackgroundTask(application, name='sync_soundcloud', group='Task'):
with app.app_context():
SoundcloudTrack.sync()
def schedule():
from pmg import scheduler
# Schedule background task for sending saved search alerts every
# day at 3am (UTC)
jobs = [
scheduler.add_job('pmg.tasks:send_saved_search_alerts', 'cron',
id='send-saved-search-alerts', replace_existing=True,
coalesce=True, hour=3),
scheduler.add_job(sync_soundcloud, 'cron',
id='sync-soundcloud', replace_existing=True,
coalesce=True, minute='*/20'),
]
for job in jobs:
log.info("Scheduled task: %s" % job)
| Change soundcloud sync to happen every 20 minutes | Change soundcloud sync to happen every 20 minutes
| Python | apache-2.0 | Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2 | ---
+++
@@ -33,7 +33,7 @@
coalesce=True, hour=3),
scheduler.add_job(sync_soundcloud, 'cron',
id='sync-soundcloud', replace_existing=True,
- coalesce=True, minute='*/1'),
+ coalesce=True, minute='*/20'),
]
for job in jobs:
log.info("Scheduled task: %s" % job) |
8f6672d9a92014c95deb687e53c146e17b66c880 | purchase_stock_picking_return_invoicing/__manifest__.py | purchase_stock_picking_return_invoicing/__manifest__.py | # Copyright 2019 ForgeFlow S.L. (https://www.forgeflow.com)
# Copyright 2017-2018 Tecnativa - Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Purchase Stock Picking Return Invoicing",
"summary": "Add an option to refund returned pickings",
"version": "14.0.1.0.0",
"category": "Purchases",
"website": "https://github.com/OCA/account-invoicing",
"author": "ForgeFlow, Tecnativa, Odoo Community Association (OCA)",
"license": "AGPL-3",
"installable": True,
"development_status": "Mature",
"depends": ["purchase_stock"],
"data": ["views/account_invoice_view.xml", "views/purchase_view.xml"],
"maintainers": ["pedrobaeza", "MiquelRForgeFlow"],
}
| # Copyright 2019 ForgeFlow S.L. (https://www.forgeflow.com)
# Copyright 2017-2018 Tecnativa - Pedro M. Baeza
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
"name": "Purchase Stock Picking Return Invoicing",
"summary": "Add an option to refund returned pickings",
"version": "14.0.1.1.0",
"category": "Purchases",
"website": "https://github.com/OCA/account-invoicing",
"author": "ForgeFlow, Tecnativa, Odoo Community Association (OCA)",
"license": "AGPL-3",
"installable": True,
"development_status": "Mature",
"depends": ["purchase_stock"],
"data": ["views/account_invoice_view.xml", "views/purchase_view.xml"],
"maintainers": ["pedrobaeza", "MiquelRForgeFlow"],
}
| Hide button on proper counter | [FIX] purchase_stock_picking_return_invoicing: Hide button on proper counter
We should hide the view refunds button when there are no refunds, not
when there aren't invoices.
It hasn't been detected till now, because you usually have an invoice
before doing a refund, but in case you include this purchase order
in a vendor refund, you won't get any vendor bill.
TT32388
| Python | agpl-3.0 | OCA/account-invoicing,OCA/account-invoicing | ---
+++
@@ -5,7 +5,7 @@
{
"name": "Purchase Stock Picking Return Invoicing",
"summary": "Add an option to refund returned pickings",
- "version": "14.0.1.0.0",
+ "version": "14.0.1.1.0",
"category": "Purchases",
"website": "https://github.com/OCA/account-invoicing",
"author": "ForgeFlow, Tecnativa, Odoo Community Association (OCA)", |
5d50ab9457ef52d3b0a4d75966143245ed5ca305 | sdks/python/apache_beam/runners/worker/channel_factory.py | sdks/python/apache_beam/runners/worker/channel_factory.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Factory to create grpc channel."""
# pytype: skip-file
import grpc
class GRPCChannelFactory(grpc.StreamStreamClientInterceptor):
DEFAULT_OPTIONS = [("grpc.keepalive_time_ms", 20000)]
def __init__(self):
pass
@staticmethod
def insecure_channel(target, options=None):
if options is None:
options = []
return grpc.insecure_channel(
target, options=options + GRPCChannelFactory.DEFAULT_OPTIONS)
@staticmethod
def secure_channel(target, credentials, options=None):
if options is None:
options = []
return grpc.secure_channel(
target,
credentials,
options=options + GRPCChannelFactory.DEFAULT_OPTIONS)
| #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Factory to create grpc channel."""
# pytype: skip-file
import grpc
class GRPCChannelFactory(grpc.StreamStreamClientInterceptor):
DEFAULT_OPTIONS = [
("grpc.keepalive_time_ms", 20000),
("grpc.keepalive_timeout_ms", 300000),
]
def __init__(self):
pass
@staticmethod
def insecure_channel(target, options=None):
if options is None:
options = []
return grpc.insecure_channel(
target, options=options + GRPCChannelFactory.DEFAULT_OPTIONS)
@staticmethod
def secure_channel(target, credentials, options=None):
if options is None:
options = []
return grpc.secure_channel(
target,
credentials,
options=options + GRPCChannelFactory.DEFAULT_OPTIONS)
| Increase keepalive timeout to 5 minutes. | Increase keepalive timeout to 5 minutes.
| Python | apache-2.0 | chamikaramj/beam,chamikaramj/beam,apache/beam,robertwb/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,chamikaramj/beam,apache/beam,lukecwik/incubator-beam,apache/beam,chamikaramj/beam,chamikaramj/beam,lukecwik/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,chamikaramj/beam,lukecwik/incubator-beam,chamikaramj/beam,apache/beam,apache/beam,lukecwik/incubator-beam,lukecwik/incubator-beam,robertwb/incubator-beam,apache/beam,apache/beam,robertwb/incubator-beam,lukecwik/incubator-beam,apache/beam,lukecwik/incubator-beam,apache/beam,robertwb/incubator-beam,chamikaramj/beam,lukecwik/incubator-beam,robertwb/incubator-beam,apache/beam,chamikaramj/beam,robertwb/incubator-beam,lukecwik/incubator-beam,apache/beam,chamikaramj/beam | ---
+++
@@ -22,7 +22,10 @@
class GRPCChannelFactory(grpc.StreamStreamClientInterceptor):
- DEFAULT_OPTIONS = [("grpc.keepalive_time_ms", 20000)]
+ DEFAULT_OPTIONS = [
+ ("grpc.keepalive_time_ms", 20000),
+ ("grpc.keepalive_timeout_ms", 300000),
+ ]
def __init__(self):
pass |
a5b3e991d128693e0d1f1ed6892c9c1c4a507a3d | src/apps/processing/ala/management/commands/ala_import.py | src/apps/processing/ala/management/commands/ala_import.py | import logging
from django.core.management.base import BaseCommand
from apps.processing.ala.util import util
from dateutil.parser import parse
from datetime import date, timedelta
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Import data from ALA stations, optionally you can pass date. Otherwise it will fetch yesterday data'
def add_arguments(self, parser):
parser.add_argument('date', nargs='?', type=parse, default=None)
def handle(self, *args, **options):
stations = util.get_or_create_stations()
day = options['date']
if day is None:
day = date.today() - timedelta(1)
logger.info(
'Importing observations of {} ALA stations from {}.'.format(
len(stations),
day
)
)
try:
for station in stations:
util.load(station, day)
util.create_avgs(station, day)
except Exception as e:
self.stdout.write(self.style.ERROR(e))
| import logging
from django.core.management.base import BaseCommand
from apps.processing.ala.util import util
from dateutil.parser import parse
from datetime import date, timedelta
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'Import data from ALA stations, optionally you can pass date. Otherwise it will fetch yesterday data'
def add_arguments(self, parser):
parser.add_argument('date', nargs='?', type=parse, default=None)
def handle(self, *args, **options):
stations = util.get_or_create_stations()
day = options['date']
if day is None:
day = date.today() - timedelta(2)
logger.info(
'Importing observations of {} ALA stations from {}.'.format(
len(stations),
day
)
)
try:
for station in stations:
util.load(station, day)
util.create_avgs(station, day)
except Exception as e:
self.stdout.write(self.style.ERROR(e))
| Make default ALA's date the day before yesterday | Make default ALA's date the day before yesterday
| Python | bsd-3-clause | gis4dis/poster,gis4dis/poster,gis4dis/poster | ---
+++
@@ -18,7 +18,7 @@
stations = util.get_or_create_stations()
day = options['date']
if day is None:
- day = date.today() - timedelta(1)
+ day = date.today() - timedelta(2)
logger.info(
'Importing observations of {} ALA stations from {}.'.format( |
1540d76aff3d01ea4d94eacab362a82458e65dd7 | AFQ/tests/test_dki.py | AFQ/tests/test_dki.py | from AFQ import dki
import numpy.testing as npt
def test_fit_dki_inputs():
data_files = ["String in a list"]
bval_files = "just a string"
bvec_files = "just another string"
npt.assert_raises(ValueError, dki.fit_dki, data_files, bval_files,
bvec_files)
| import tempfile
import numpy.testing as npt
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from AFQ import dki
def test_fit_dki_inputs():
data_files = ["String in a list"]
bval_files = "just a string"
bvec_files = "just another string"
npt.assert_raises(ValueError, dki.fit_dki, data_files, bval_files,
bvec_files)
def test_fit_dti():
fdata, fbval, fbvec = dpd.get_data('small_101D')
with nbtmp.InTemporaryDirectory() as tmpdir:
dki.fit_dki(fdata, fbval, fbvec, out_dir=tmpdir)
| Add smoke-testing of the DKI fit. | TST: Add smoke-testing of the DKI fit.
| Python | bsd-2-clause | yeatmanlab/pyAFQ,arokem/pyAFQ,yeatmanlab/pyAFQ,arokem/pyAFQ | ---
+++
@@ -1,5 +1,10 @@
+import tempfile
+
+import numpy.testing as npt
+import nibabel.tmpdirs as nbtmp
+import dipy.data as dpd
+
from AFQ import dki
-import numpy.testing as npt
def test_fit_dki_inputs():
@@ -8,3 +13,8 @@
bvec_files = "just another string"
npt.assert_raises(ValueError, dki.fit_dki, data_files, bval_files,
bvec_files)
+
+def test_fit_dti():
+ fdata, fbval, fbvec = dpd.get_data('small_101D')
+ with nbtmp.InTemporaryDirectory() as tmpdir:
+ dki.fit_dki(fdata, fbval, fbvec, out_dir=tmpdir) |
522e5e04b2a75a1c4c863116d8ada8c04e122c1a | scheduler.py | scheduler.py | import logging
import os
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
raven_client = RavenClient()
logger = logging.getLogger(__name__)
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if os.getenv("TEST_SCHEDULE"):
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logger.info("Destalinating")
if "SB_TOKEN" not in os.environ or "API_TOKEN" not in os.environ:
logger.error("Missing at least one Slack environment variable.")
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logger.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not os.getenv('SENTRY_DSN'):
raise e
logger.info("END: destalinate_job")
if __name__ == "__main__":
sched.start()
| import logging
import os
from apscheduler.schedulers.blocking import BlockingScheduler
from raven.base import Client as RavenClient
import warner
import archiver
import announcer
import flagger
raven_client = RavenClient()
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if os.getenv("TEST_SCHEDULE"):
schedule_kwargs = {"hour": "*", "minute": "*/10"}
else:
schedule_kwargs = {"hour": 4}
sched = BlockingScheduler()
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
logging.info("Destalinating")
if "SB_TOKEN" not in os.environ or "API_TOKEN" not in os.environ:
logging.error("Missing at least one Slack environment variable.")
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not os.getenv('SENTRY_DSN'):
raise e
logging.info("END: destalinate_job")
if __name__ == "__main__":
sched.start()
| Use logging directly in main | Use logging directly in main
| Python | apache-2.0 | royrapoport/destalinator,TheConnMan/destalinator,randsleadershipslack/destalinator,royrapoport/destalinator,TheConnMan/destalinator,randsleadershipslack/destalinator | ---
+++
@@ -12,8 +12,6 @@
raven_client = RavenClient()
-logger = logging.getLogger(__name__)
-
# When testing changes, set the "TEST_SCHEDULE" envvar to run more often
if os.getenv("TEST_SCHEDULE"):
schedule_kwargs = {"hour": "*", "minute": "*/10"}
@@ -25,21 +23,21 @@
@sched.scheduled_job("cron", **schedule_kwargs)
def destalinate_job():
- logger.info("Destalinating")
+ logging.info("Destalinating")
if "SB_TOKEN" not in os.environ or "API_TOKEN" not in os.environ:
- logger.error("Missing at least one Slack environment variable.")
+ logging.error("Missing at least one Slack environment variable.")
else:
try:
warner.Warner().warn()
archiver.Archiver().archive()
announcer.Announcer().announce()
flagger.Flagger().flag()
- logger.info("OK: destalinated")
+ logging.info("OK: destalinated")
except Exception as e: # pylint: disable=W0703
raven_client.captureException()
if not os.getenv('SENTRY_DSN'):
raise e
- logger.info("END: destalinate_job")
+ logging.info("END: destalinate_job")
if __name__ == "__main__": |
98bf53dd350869e31c89f14cb0ebfa6a467dd0ec | events/migrations/0017_auto_20160208_1729.py | events/migrations/0017_auto_20160208_1729.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-08 15:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0016_auto_20160205_1754'),
]
operations = [
migrations.AlterField(
model_name='image',
name='url',
field=models.URLField(blank=True, max_length=400, null=True, verbose_name='Image'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-02-08 15:29
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('events', '0016_auto_20160205_1754'),
]
operations = [
migrations.AlterField(
model_name='event',
name='external_image_url',
field=models.URLField(blank=True, max_length=400, null=True),
),
migrations.AlterField(
model_name='image',
name='url',
field=models.URLField(blank=True, max_length=400, null=True, verbose_name='Image'),
),
]
| Revert "Remove redundant migration operation." | Revert "Remove redundant migration operation."
This reverts commit 9d34264d275acd32122de3567e60b24a417d6098.
| Python | mit | City-of-Helsinki/linkedevents,aapris/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,aapris/linkedevents,tuomas777/linkedevents,aapris/linkedevents | ---
+++
@@ -13,6 +13,11 @@
operations = [
migrations.AlterField(
+ model_name='event',
+ name='external_image_url',
+ field=models.URLField(blank=True, max_length=400, null=True),
+ ),
+ migrations.AlterField(
model_name='image',
name='url',
field=models.URLField(blank=True, max_length=400, null=True, verbose_name='Image'), |
4562f84191cde7b43ffb7e028eb33996789b1ea4 | foomodules/link_harvester/common_handlers.py | foomodules/link_harvester/common_handlers.py | import logging
import re
import socket
import urllib
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__)
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if not WURSTBALL_RE.match(metadata.url):
return None
ret = default_handler(metadata)
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError) as err:
logger.warn("Could not download Wurstball image: {}".format(err))
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
| import logging
import re
import socket
import urllib
from bs4 import BeautifulSoup
logger = logging.getLogger(__name__)
WURSTBALL_RE = re.compile("^http[s]://wurstball.de/[0-9]+/")
def default_handler(metadata):
return {key: getattr(metadata, key) for key in
["original_url", "url", "title", "description",
"human_readable_type"]}
def wurstball_handler(metadata):
if not WURSTBALL_RE.match(metadata.url):
return None
ret = {
"human_readable_type": metadata.human_readable_type,
"url": metadata.url,
"original_url": metadata.original_url,
"title": None,
"description": None
}
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"]
try:
response = urllib.request.urlopen(img_url, timeout=5)
img_data = response.read()
except (socket.timeout,
urllib.error.URLError,
urllib.error.HTTPError) as err:
logger.warn("Could not download Wurstball image: {}".format(err))
return ret
mime_type = response.getheader("Content-Type")
ret.update({"image_mime_type": mime_type,
"image_buffer": img_data,
"image_url": img_url})
return ret
| Discard title and description from wurstball | Discard title and description from wurstball
| Python | mit | horazont/xmpp-crowd | ---
+++
@@ -18,7 +18,13 @@
if not WURSTBALL_RE.match(metadata.url):
return None
- ret = default_handler(metadata)
+ ret = {
+ "human_readable_type": metadata.human_readable_type,
+ "url": metadata.url,
+ "original_url": metadata.original_url,
+ "title": None,
+ "description": None
+ }
soup = BeautifulSoup(metadata.buf)
img_url = soup.find(id="content-main").img["src"] |
487382ab2aafe1c92aa64192432379b4f3809732 | pywkeeper.py | pywkeeper.py | from Crypto.Cipher import AES
import json
import os
KEY_LENGTH = 256
BLOCK_LENGTH = 16
KEY_FILE = 'key'
ENCRYPTED_FILE = 'data'
DECRYPTED_FILE = 'tmp'
EOT_CHAR = '\x04'
def get_cipher(iv):
try:
key = open(KEY_FILE, 'rb').read()
except IOError:
key = input("Please enter the decryption key: ")
return AES.new(key, AES.MODE_CBC, iv)
def encrypt():
bytes = multiple_of(open(DECRYPTED_FILE, 'rt').read().encode(), BLOCK_LENGTH)
iv = os.urandom(16)
c = get_cipher(iv)
return (iv, c.encrypt(bytes))
def decrypt():
bytes = open(ENCRYPTED_FILE, 'rb').read()
c = get_cipher(bytes[:16])
return c.decrypt(bytes).decode('utf-8')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length))).encode()
| #!/usr/bin/env python
from Crypto.Cipher import AES
import json
import os
import optparse
KEY_LENGTH = 256
BLOCK_LENGTH = 16
KEY_FILE = 'key'
ENCRYPTED_FILE = 'data'
DECRYPTED_FILE = 'tmp'
EOT_CHAR = '\x04'
def main(options, arguments):
pass
def get_cipher(iv):
try:
key = open(KEY_FILE, 'rb').read()
except IOError:
key = input("Please enter the decryption key: ")
return AES.new(key, AES.MODE_CBC, iv)
def encrypt():
bytes = multiple_of(open(DECRYPTED_FILE, 'rt').read().encode(), BLOCK_LENGTH)
iv = os.urandom(16)
c = get_cipher(iv)
return (iv, c.encrypt(bytes))
def decrypt():
bytes = open(ENCRYPTED_FILE, 'rb').read()
c = get_cipher(bytes[:16])
return c.decrypt(bytes).decode('utf-8')
def multiple_of(bytes, length):
if len(bytes) % length == 0:
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length))).encode()
if __name__ == '__main__':
p = optparse.OptionParser()
options, arguments = p.parse_args()
main(options, arguments)
| Add option parser and main method | Add option parser and main method
| Python | unlicense | kvikshaug/pwkeeper | ---
+++
@@ -1,6 +1,8 @@
+#!/usr/bin/env python
from Crypto.Cipher import AES
import json
import os
+import optparse
KEY_LENGTH = 256
BLOCK_LENGTH = 16
@@ -10,6 +12,9 @@
DECRYPTED_FILE = 'tmp'
EOT_CHAR = '\x04'
+
+def main(options, arguments):
+ pass
def get_cipher(iv):
try:
@@ -34,3 +39,8 @@
return bytes
else:
return bytes + (EOT_CHAR * (length - (len(bytes) % length))).encode()
+
+if __name__ == '__main__':
+ p = optparse.OptionParser()
+ options, arguments = p.parse_args()
+ main(options, arguments) |
8548befae1c2fa2f0e607a39917f789c0a078235 | Misc/Vim/syntax_test.py | Misc/Vim/syntax_test.py | """Test file for syntax highlighting of editors.
Meant to cover a wide range of different types of statements and expressions.
Not necessarily sensical.
"""
assert True
def foo(): pass
foo() # Uncoloured
while False: pass
1 and 2
if False: pass
from sys import path
# Comment
# XXX catch your attention
'single-quote', u'unicode'
"double-quote"
"""triple double-quote"""
'''triple single-quote'''
r'raw'
ur'unicode raw'
'escape\n'
'\04' # octal
'\xFF' # hex
'\u1111' # unicode character
1
1L
1.0
.1
1+2j
[] # Uncoloured
{} # Uncoloured
() # Uncoloured
all
GeneratorExit
trailing_whitespace = path
| """Test file for syntax highlighting of editors.
Meant to cover a wide range of different types of statements and expressions.
Not necessarily sensical or comprehensive (assume that if one exception is
highlighted that all are, for instance).
Highlighting extraneous whitespace at the end of the line is not represented
here as all trailing whitespace is automatically removed from .py files in the
repository.
"""
# Comment
# OPTIONAL: XXX catch your attention
# Statements
assert True # keyword
def foo(): # function definition
return []
class Bar(object): # Class definition
pass
foo() # UNCOLOURED: function call
while False: # 'while'
continue
for x in foo(): # 'for'
break
if False: pass # 'if'
elif False: pass
else False: pass
from sys import path as thing # Import
# Constants
'single-quote', u'unicode' # Strings of all kinds; prefixes not highlighted
"double-quote"
"""triple double-quote"""
'''triple single-quote'''
r'raw'
ur'unicode raw'
'escape\n'
'\04' # octal
'\xFF' # hex
'\u1111' # unicode character
1 # Integral
1L
1.0 # Float
.1
1+2j # Complex
# Expressions
1 and 2 or 3 # Boolean operators
2 < 3 # UNCOLOURED: comparison operators
spam = 42 # UNCOLOURED: assignment
2 + 3 # UNCOLOURED: number operators
[] # UNCOLOURED: list
{} # UNCOLOURED: dict
(1,) # UNCOLOURED: tuple
all # Built-in functions
GeneratorExit # Exceptions
| Remove line meant to test trailing whitespace since that kind of whitespace is automatically removed. | Remove line meant to test trailing whitespace since that kind of whitespace is
automatically removed.
Also annotate what each line is meant to test.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | ---
+++
@@ -1,19 +1,35 @@
"""Test file for syntax highlighting of editors.
Meant to cover a wide range of different types of statements and expressions.
-Not necessarily sensical.
+Not necessarily sensical or comprehensive (assume that if one exception is
+highlighted that all are, for instance).
+
+Highlighting extraneous whitespace at the end of the line is not represented
+here as all trailing whitespace is automatically removed from .py files in the
+repository.
"""
-assert True
-def foo(): pass
-foo() # Uncoloured
-while False: pass
-1 and 2
-if False: pass
-from sys import path
# Comment
-# XXX catch your attention
-'single-quote', u'unicode'
+# OPTIONAL: XXX catch your attention
+
+# Statements
+assert True # keyword
+def foo(): # function definition
+ return []
+class Bar(object): # Class definition
+ pass
+foo() # UNCOLOURED: function call
+while False: # 'while'
+ continue
+for x in foo(): # 'for'
+ break
+if False: pass # 'if'
+elif False: pass
+else False: pass
+from sys import path as thing # Import
+
+# Constants
+'single-quote', u'unicode' # Strings of all kinds; prefixes not highlighted
"double-quote"
"""triple double-quote"""
'''triple single-quote'''
@@ -23,14 +39,19 @@
'\04' # octal
'\xFF' # hex
'\u1111' # unicode character
-1
+1 # Integral
1L
-1.0
+1.0 # Float
.1
-1+2j
-[] # Uncoloured
-{} # Uncoloured
-() # Uncoloured
-all
-GeneratorExit
-trailing_whitespace = path
+1+2j # Complex
+
+# Expressions
+1 and 2 or 3 # Boolean operators
+2 < 3 # UNCOLOURED: comparison operators
+spam = 42 # UNCOLOURED: assignment
+2 + 3 # UNCOLOURED: number operators
+[] # UNCOLOURED: list
+{} # UNCOLOURED: dict
+(1,) # UNCOLOURED: tuple
+all # Built-in functions
+GeneratorExit # Exceptions |
fdf0d3240f7c2ccdfdd65d223f7949c98c9dc527 | multi_import/exporter.py | multi_import/exporter.py | import tablib
from multi_import.fields import FieldHelper
__all__ = [
'Exporter',
]
class Exporter(FieldHelper):
def __init__(self, queryset, serializer):
self.queryset = queryset
self.serializer = serializer()
def export_dataset(self, template=False):
dataset = tablib.Dataset(headers=self.get_header())
if not template:
for instance in self.queryset:
dataset.append(self.get_row(instance))
return dataset
def get_header(self):
return self.serializer.get_fields().keys()
def get_row(self, instance):
results = []
representation = self.serializer.to_representation(instance=instance)
for column_name, value in representation.items():
field = self.serializer.fields[column_name]
results.append(
self.to_string_representation(field, value)
)
return results
| import tablib
from multi_import.fields import FieldHelper
__all__ = [
'Exporter',
]
class Exporter(FieldHelper):
def __init__(self, queryset, serializer):
self.queryset = queryset
self.serializer = serializer()
def export_dataset(self, template=False):
dataset = tablib.Dataset(headers=self.get_header())
if not template:
for instance in self.queryset:
dataset.append(self.get_row(instance))
return dataset
def get_header(self):
return self.serializer.get_fields().keys()
def escape(self, s):
"""
This escape method will prevent csv macro injection.
When excel sees a space, it treats the contents as a string, therefore preventing formulas from running.
"""
blacklist = ['=', '+', '-', '@']
if s and s[0] in blacklist:
s = ' ' + s
return s
def get_row(self, instance):
results = []
representation = self.serializer.to_representation(instance=instance)
for column_name, value in representation.items():
field = self.serializer.fields[column_name]
results.append(
self.escape(self.to_string_representation(field, value))
)
return results
| Fix for formula interpretation triggering in Excel | Fix for formula interpretation triggering in Excel
| Python | mit | sdelements/django-multi-import | ---
+++
@@ -25,12 +25,24 @@
def get_header(self):
return self.serializer.get_fields().keys()
+ def escape(self, s):
+ """
+ This escape method will prevent csv macro injection.
+ When excel sees a space, it treats the contents as a string, therefore preventing formulas from running.
+ """
+ blacklist = ['=', '+', '-', '@']
+
+ if s and s[0] in blacklist:
+ s = ' ' + s
+
+ return s
+
def get_row(self, instance):
results = []
representation = self.serializer.to_representation(instance=instance)
for column_name, value in representation.items():
field = self.serializer.fields[column_name]
results.append(
- self.to_string_representation(field, value)
+ self.escape(self.to_string_representation(field, value))
)
return results |
09ebeb873c83d51053ef6aa2d7c6ce47b4be5070 | ckanext/archiver/helpers.py | ckanext/archiver/helpers.py | from ckan.plugins import toolkit as tk
def archiver_resource_show(resource_id):
data_dict = {'id': resource_id}
return tk.get_action('archiver_resource_show')(data_dict)
def archiver_is_resource_broken_html(resource):
archival = resource.get('archiver')
if not archival:
return '<!-- No archival info for this resource -->'
extra_vars = {'resource': resource}
extra_vars.update(archival)
return tk.literal(
tk.render('archiver/is_resource_broken.html',
extra_vars=extra_vars))
def archiver_is_resource_cached_html(resource):
archival = resource.get('archiver')
if not archival:
return '<!-- No archival info for this resource -->'
extra_vars = {'resource': resource}
extra_vars.update(archival)
return tk.literal(
tk.render('archiver/is_resource_cached.html',
extra_vars=extra_vars))
# Replacement for the core ckan helper 'format_resource_items'
# but with our own blacklist
def archiver_format_resource_items(items):
blacklist = ['archiver', 'qa']
items_ = [item for item in items
if item[0] not in blacklist]
import ckan.lib.helpers as ckan_helpers
return ckan_helpers.format_resource_items(items_)
| from ckan.plugins import toolkit as tk
def archiver_resource_show(resource_id):
data_dict = {'id': resource_id}
return tk.get_action('archiver_resource_show')(data_dict)
def archiver_is_resource_broken_html(resource):
archival = resource.get('archiver')
if not archival:
return tk.literal('<!-- No archival info for this resource -->')
extra_vars = {'resource': resource}
extra_vars.update(archival)
return tk.literal(
tk.render('archiver/is_resource_broken.html',
extra_vars=extra_vars))
def archiver_is_resource_cached_html(resource):
archival = resource.get('archiver')
if not archival:
return tk.literal('<!-- No archival info for this resource -->')
extra_vars = {'resource': resource}
extra_vars.update(archival)
return tk.literal(
tk.render('archiver/is_resource_cached.html',
extra_vars=extra_vars))
# Replacement for the core ckan helper 'format_resource_items'
# but with our own blacklist
def archiver_format_resource_items(items):
blacklist = ['archiver', 'qa']
items_ = [item for item in items
if item[0] not in blacklist]
import ckan.lib.helpers as ckan_helpers
return ckan_helpers.format_resource_items(items_)
| Hide comments meant as unseen | Hide comments meant as unseen
| Python | mit | ckan/ckanext-archiver,datagovuk/ckanext-archiver,datagovuk/ckanext-archiver,ckan/ckanext-archiver,datagovuk/ckanext-archiver,ckan/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver,DanePubliczneGovPl/ckanext-archiver | ---
+++
@@ -9,7 +9,7 @@
def archiver_is_resource_broken_html(resource):
archival = resource.get('archiver')
if not archival:
- return '<!-- No archival info for this resource -->'
+ return tk.literal('<!-- No archival info for this resource -->')
extra_vars = {'resource': resource}
extra_vars.update(archival)
return tk.literal(
@@ -20,7 +20,7 @@
def archiver_is_resource_cached_html(resource):
archival = resource.get('archiver')
if not archival:
- return '<!-- No archival info for this resource -->'
+ return tk.literal('<!-- No archival info for this resource -->')
extra_vars = {'resource': resource}
extra_vars.update(archival)
return tk.literal( |
4ebc13ac1913dfe3fcd7bdb7c7235b7b88718574 | fastdraw/api/commands.py | fastdraw/api/commands.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2014 PolyBeacon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def answer(delay=0):
"""Answer the channel.
:param delay: The number of milliseconds to wait before moving to the next
priority.
:type delay: int
"""
res = 'same => n,Answer(%d)' % delay
return res
def goto(context, exten='s', priority=1):
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res
def hangup(cause=''):
"""Hangup the calling channel.
:param cause: Hangup cause code to use for the channel.
:type cause: str
"""
res = 'same => n,Hangup(%s)' % cause
return res
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2014 PolyBeacon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
def answer(delay=0):
"""Answer the channel.
:param delay: The number of milliseconds to wait before moving to the next
priority.
:type delay: int
"""
res = 'same => n,Answer(%d)' % delay
return res
def goto(context, exten='s', priority=1):
"""Goto another point in the dialplan
:param context: The context or label to jump to
:type context: string
:param exten: The extension within that context to goto (default: s)
:type exten: string
:param priority: The line within the extension (default: 1)
:type priority: int
"""
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res
def hangup(cause=''):
"""Hangup the calling channel.
:param cause: Hangup cause code to use for the channel.
:type cause: str
"""
res = 'same => n,Hangup(%s)' % cause
return res
| Add documentation for goto command | Add documentation for goto command
Change-Id: I94e280eef509abe65f552b6e78f21eabfe4192e3
Signed-off-by: Sarah Liske <e262b8a15d521183e33ead305fd79e90e1942cdd@polybeacon.com>
| Python | apache-2.0 | kickstandproject/fastdraw | ---
+++
@@ -28,6 +28,15 @@
def goto(context, exten='s', priority=1):
+ """Goto another point in the dialplan
+
+ :param context: The context or label to jump to
+ :type context: string
+ :param exten: The extension within that context to goto (default: s)
+ :type exten: string
+ :param priority: The line within the extension (default: 1)
+ :type priority: int
+ """
res = 'same => n,Goto(%s,%s,%d)' % (context, exten, priority)
return res |
d587cc4f87276be1a95966a82293c4bc7bb02e26 | experiments/tf_trainer/common/base_model.py | experiments/tf_trainer/common/base_model.py | """Interface for Models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import tensorflow as tf
from keras import models
from tf_trainer.common import text_preprocessor
from tf_trainer.common import types
from typing import Callable
class BaseModel(abc.ABC):
"""Tentative interface for all model classes.
Although the code doesn't take advantage of this interface yet, all models
should subclass this one.
"""
@staticmethod
def create(
estimator_fn: Callable[[str], tf.estimator.Estimator]) -> 'BaseModel':
class Model(BaseModel):
def estimator(model_dir):
return estimator_fn(model_dir)
return Model()
@abc.abstractmethod
def estimator(self, model_dir: str) -> tf.estimator.Estimator:
pass
| """Interface for Models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import abc
import tensorflow as tf
from keras import models
from tf_trainer.common import text_preprocessor
from tf_trainer.common import types
from typing import Callable
class BaseModel(abc.ABC):
  """Tentative interface for all model classes.

  Although nothing relies on this interface yet, every model implementation
  is expected to subclass it.
  """

  @staticmethod
  def create(
      estimator_fn: Callable[[str], tf.estimator.Estimator]) -> 'BaseModel':
    """Build an anonymous BaseModel that delegates to *estimator_fn*."""

    class _DelegatingModel(BaseModel):
      """Concrete BaseModel backed by the captured estimator factory."""

      def estimator(self, model_dir):
        # Forward straight to the closure-captured factory callable.
        return estimator_fn(model_dir)

    return _DelegatingModel()

  @abc.abstractmethod
  def estimator(self, model_dir: str) -> tf.estimator.Estimator:
    """Return the tf.estimator.Estimator rooted at *model_dir*."""
    pass
| Correct method signature for BaseModel create | Correct method signature for BaseModel create
| Python | apache-2.0 | conversationai/conversationai-models,conversationai/conversationai-models | ---
+++
@@ -26,7 +26,7 @@
class Model(BaseModel):
- def estimator(model_dir):
+ def estimator(self, model_dir):
return estimator_fn(model_dir)
return Model() |
f0cf3234880007c548bad894be8a0df797387da7 | common/lib/xmodule/setup.py | common/lib/xmodule/setup.py | from setuptools import setup, find_packages
# Packaging definition for the XModule library.
setup(
    name="XModule",
    version="0.1",
    # Pick up every package found under this directory.
    packages=find_packages(),
    install_requires=['distribute'],
    # Ship the bundled JavaScript assets alongside every package.
    package_data={
        '': ['js/*']
    },

    # See http://guide.python-distribute.org/creation.html#entry-points
    # for a description of entry_points
    entry_points={
        # Each entry maps an XModule tag name to the descriptor class that
        # implements it, e.g. a "course" tag is handled by SequenceDescriptor.
        'xmodule.v1': [
            "course = seq_module:SequenceDescriptor",
            "html = html_module:HtmlModuleDescriptor",
        ]
    }
)
| from setuptools import setup, find_packages
# Descriptor registrations for the "xmodule.v1" entry-point group: each
# entry maps an XModule tag name to the descriptor class implementing it.
# See http://guide.python-distribute.org/creation.html#entry-points
# for a description of entry_points.
xmodule_v1_descriptors = [
    "chapter = seq_module:SequenceDescriptor",
    "course = seq_module:SequenceDescriptor",
    "html = html_module:HtmlModuleDescriptor",
]

# Packaging definition for the XModule library.
setup(
    name="XModule",
    version="0.1",
    packages=find_packages(),
    install_requires=['distribute'],
    # Ship the bundled JavaScript assets alongside every package.
    package_data={
        '': ['js/*']
    },
    entry_points={
        'xmodule.v1': xmodule_v1_descriptors,
    },
)
| Handle chapters as sequence modules | Handle chapters as sequence modules
| Python | agpl-3.0 | cselis86/edx-platform,leansoft/edx-platform,dsajkl/reqiop,jazkarta/edx-platform-for-isc,Unow/edx-platform,jazkarta/edx-platform,appliedx/edx-platform,ferabra/edx-platform,a-parhom/edx-platform,romain-li/edx-platform,mjg2203/edx-platform-seas,antoviaque/edx-platform,fintech-circle/edx-platform,ampax/edx-platform-backup,caesar2164/edx-platform,Stanford-Online/edx-platform,doganov/edx-platform,simbs/edx-platform,chand3040/cloud_that,eemirtekin/edx-platform,morpheby/levelup-by,etzhou/edx-platform,auferack08/edx-platform,franosincic/edx-platform,EduPepperPDTesting/pepper2013-testing,IndonesiaX/edx-platform,beacloudgenius/edx-platform,alexthered/kienhoc-platform,UOMx/edx-platform,itsjeyd/edx-platform,nikolas/edx-platform,TeachAtTUM/edx-platform,nttks/jenkins-test,cselis86/edx-platform,UXE/local-edx,LICEF/edx-platform,jzoldak/edx-platform,ahmedaljazzar/edx-platform,chudaol/edx-platform,devs1991/test_edx_docmode,shashank971/edx-platform,CredoReference/edx-platform,nttks/jenkins-test,waheedahmed/edx-platform,morpheby/levelup-by,ahmadiga/min_edx,jruiperezv/ANALYSE,Kalyzee/edx-platform,bitifirefly/edx-platform,CredoReference/edx-platform,dcosentino/edx-platform,gsehub/edx-platform,chauhanhardik/populo_2,edx-solutions/edx-platform,zadgroup/edx-platform,shubhdev/edxOnBaadal,naresh21/synergetics-edx-platform,shubhdev/openedx,playm2mboy/edx-platform,olexiim/edx-platform,Livit/Livit.Learn.EdX,pepeportela/edx-platform,dkarakats/edx-platform,Shrhawk/edx-platform,morenopc/edx-platform,zofuthan/edx-platform,RPI-OPENEDX/edx-platform,atsolakid/edx-platform,eduNEXT/edx-platform,4eek/edx-platform,RPI-OPENEDX/edx-platform,mbareta/edx-platform-ft,y12uc231/edx-platform,RPI-OPENEDX/edx-platform,etzhou/edx-platform,EduPepperPD/pepper2013,xuxiao19910803/edx-platform,Kalyzee/edx-platform,antonve/s4-project-mooc,shabab12/edx-platform,fintech-circle/edx-platform,yokose-ks/edx-platform,cyanna/edx-platform,Stanford-Online/edx-platform,nagyistoce/edx-platform,longmen21/edx-platfo
rm,gymnasium/edx-platform,chand3040/cloud_that,JCBarahona/edX,simbs/edx-platform,romain-li/edx-platform,pdehaye/theming-edx-platform,gymnasium/edx-platform,dcosentino/edx-platform,philanthropy-u/edx-platform,cyanna/edx-platform,rhndg/openedx,SivilTaram/edx-platform,nagyistoce/edx-platform,atsolakid/edx-platform,sameetb-cuelogic/edx-platform-test,ZLLab-Mooc/edx-platform,PepperPD/edx-pepper-platform,antonve/s4-project-mooc,apigee/edx-platform,jazztpt/edx-platform,jswope00/griffinx,WatanabeYasumasa/edx-platform,IITBinterns13/edx-platform-dev,mjirayu/sit_academy,ubc/edx-platform,ampax/edx-platform-backup,doganov/edx-platform,AkA84/edx-platform,kxliugang/edx-platform,xuxiao19910803/edx-platform,mitocw/edx-platform,morpheby/levelup-by,LearnEra/LearnEraPlaftform,ahmedaljazzar/edx-platform,JCBarahona/edX,mitocw/edx-platform,hkawasaki/kawasaki-aio8-2,syjeon/new_edx,LearnEra/LearnEraPlaftform,shubhdev/edx-platform,pdehaye/theming-edx-platform,pomegranited/edx-platform,bigdatauniversity/edx-platform,hamzehd/edx-platform,synergeticsedx/deployment-wipro,knehez/edx-platform,appsembler/edx-platform,Softmotions/edx-platform,abdoosh00/edx-rtl-final,kamalx/edx-platform,JCBarahona/edX,knehez/edx-platform,raccoongang/edx-platform,AkA84/edx-platform,angelapper/edx-platform,nanolearningllc/edx-platform-cypress,mitocw/edx-platform,a-parhom/edx-platform,andyzsf/edx,edry/edx-platform,edry/edx-platform,nanolearningllc/edx-platform-cypress-2,utecuy/edx-platform,wwj718/ANALYSE,JCBarahona/edX,Kalyzee/edx-platform,JioEducation/edx-platform,zerobatu/edx-platform,gsehub/edx-platform,fly19890211/edx-platform,caesar2164/edx-platform,cpennington/edx-platform,pelikanchik/edx-platform,rhndg/openedx,longmen21/edx-platform,Edraak/edraak-platform,Unow/edx-platform,SravanthiSinha/edx-platform,caesar2164/edx-platform,B-MOOC/edx-platform,openfun/edx-platform,kamalx/edx-platform,Shrhawk/edx-platform,Endika/edx-platform,WatanabeYasumasa/edx-platform,SivilTaram/edx-platform,vismartltd/edx-platform,jonathan-bear
d/edx-platform,Lektorium-LLC/edx-platform,hkawasaki/kawasaki-aio8-0,deepsrijit1105/edx-platform,antoviaque/edx-platform,chand3040/cloud_that,etzhou/edx-platform,dsajkl/123,apigee/edx-platform,motion2015/a3,nikolas/edx-platform,tanmaykm/edx-platform,inares/edx-platform,jamiefolsom/edx-platform,cecep-edu/edx-platform,Shrhawk/edx-platform,defance/edx-platform,DefyVentures/edx-platform,arbrandes/edx-platform,leansoft/edx-platform,nttks/jenkins-test,simbs/edx-platform,procangroup/edx-platform,rhndg/openedx,ubc/edx-platform,auferack08/edx-platform,chrisndodge/edx-platform,chudaol/edx-platform,xuxiao19910803/edx,pelikanchik/edx-platform,eestay/edx-platform,Edraak/edraak-platform,cselis86/edx-platform,10clouds/edx-platform,tanmaykm/edx-platform,J861449197/edx-platform,TsinghuaX/edx-platform,iivic/BoiseStateX,nanolearningllc/edx-platform-cypress,halvertoluke/edx-platform,shubhdev/edx-platform,MSOpenTech/edx-platform,morenopc/edx-platform,etzhou/edx-platform,OmarIthawi/edx-platform,eestay/edx-platform,wwj718/ANALYSE,pomegranited/edx-platform,EduPepperPDTesting/pepper2013-testing,jazkarta/edx-platform-for-isc,kamalx/edx-platform,bitifirefly/edx-platform,AkA84/edx-platform,ubc/edx-platform,atsolakid/edx-platform,peterm-itr/edx-platform,dkarakats/edx-platform,knehez/edx-platform,olexiim/edx-platform,teltek/edx-platform,inares/edx-platform,jbzdak/edx-platform,abdoosh00/edraak,IONISx/edx-platform,tiagochiavericosta/edx-platform,DNFcode/edx-platform,PepperPD/edx-pepper-platform,hamzehd/edx-platform,IONISx/edx-platform,cyanna/edx-platform,BehavioralInsightsTeam/edx-platform,xingyepei/edx-platform,jazkarta/edx-platform-for-isc,rationalAgent/edx-platform-custom,jswope00/griffinx,hkawasaki/kawasaki-aio8-2,IndonesiaX/edx-platform,dsajkl/123,openfun/edx-platform,angelapper/edx-platform,hkawasaki/kawasaki-aio8-1,beacloudgenius/edx-platform,wwj718/ANALYSE,inares/edx-platform,eestay/edx-platform,valtech-mooc/edx-platform,jolyonb/edx-platform,ahmadio/edx-platform,CourseTalk/edx-platform,Cred
oReference/edx-platform,UXE/local-edx,synergeticsedx/deployment-wipro,Ayub-Khan/edx-platform,shashank971/edx-platform,iivic/BoiseStateX,louyihua/edx-platform,lduarte1991/edx-platform,lduarte1991/edx-platform,jzoldak/edx-platform,wwj718/edx-platform,jjmiranda/edx-platform,jruiperezv/ANALYSE,jswope00/griffinx,zerobatu/edx-platform,y12uc231/edx-platform,jelugbo/tundex,shashank971/edx-platform,ampax/edx-platform-backup,EduPepperPD/pepper2013,chudaol/edx-platform,Unow/edx-platform,jonathan-beard/edx-platform,mtlchun/edx,xuxiao19910803/edx,polimediaupv/edx-platform,carsongee/edx-platform,ahmedaljazzar/edx-platform,kursitet/edx-platform,naresh21/synergetics-edx-platform,chrisndodge/edx-platform,jswope00/GAI,jonathan-beard/edx-platform,Ayub-Khan/edx-platform,edry/edx-platform,abdoosh00/edraak,bdero/edx-platform,torchingloom/edx-platform,appsembler/edx-platform,EduPepperPD/pepper2013,ak2703/edx-platform,arifsetiawan/edx-platform,abdoosh00/edx-rtl-final,sameetb-cuelogic/edx-platform-test,xingyepei/edx-platform,antonve/s4-project-mooc,SivilTaram/edx-platform,WatanabeYasumasa/edx-platform,jbzdak/edx-platform,vikas1885/test1,antonve/s4-project-mooc,kmoocdev/edx-platform,eemirtekin/edx-platform,torchingloom/edx-platform,mjg2203/edx-platform-seas,wwj718/edx-platform,jamiefolsom/edx-platform,ampax/edx-platform-backup,antonve/s4-project-mooc,sameetb-cuelogic/edx-platform-test,xingyepei/edx-platform,shurihell/testasia,PepperPD/edx-pepper-platform,mahendra-r/edx-platform,beacloudgenius/edx-platform,don-github/edx-platform,nttks/edx-platform,hamzehd/edx-platform,vismartltd/edx-platform,ferabra/edx-platform,don-github/edx-platform,SravanthiSinha/edx-platform,bigdatauniversity/edx-platform,kamalx/edx-platform,sameetb-cuelogic/edx-platform-test,nanolearningllc/edx-platform-cypress-2,edx/edx-platform,appsembler/edx-platform,valtech-mooc/edx-platform,itsjeyd/edx-platform,xuxiao19910803/edx-platform,EduPepperPD/pepper2013,jazkarta/edx-platform-for-isc,shubhdev/openedx,hastexo/edx-platform,su
dheerchintala/LearnEraPlatForm,devs1991/test_edx_docmode,solashirai/edx-platform,IndonesiaX/edx-platform,caesar2164/edx-platform,ak2703/edx-platform,shubhdev/edxOnBaadal,morenopc/edx-platform,mbareta/edx-platform-ft,cecep-edu/edx-platform,msegado/edx-platform,rismalrv/edx-platform,ahmadiga/min_edx,waheedahmed/edx-platform,prarthitm/edxplatform,shubhdev/openedx,dkarakats/edx-platform,unicri/edx-platform,procangroup/edx-platform,utecuy/edx-platform,Stanford-Online/edx-platform,ZLLab-Mooc/edx-platform,vasyarv/edx-platform,ZLLab-Mooc/edx-platform,eduNEXT/edx-platform,doganov/edx-platform,rhndg/openedx,Softmotions/edx-platform,louyihua/edx-platform,solashirai/edx-platform,louyihua/edx-platform,rationalAgent/edx-platform-custom,arifsetiawan/edx-platform,kxliugang/edx-platform,motion2015/edx-platform,Edraak/edx-platform,analyseuc3m/ANALYSE-v1,mahendra-r/edx-platform,JioEducation/edx-platform,AkA84/edx-platform,pku9104038/edx-platform,zadgroup/edx-platform,vasyarv/edx-platform,iivic/BoiseStateX,atsolakid/edx-platform,tiagochiavericosta/edx-platform,devs1991/test_edx_docmode,arifsetiawan/edx-platform,xinjiguaike/edx-platform,kmoocdev2/edx-platform,hmcmooc/muddx-platform,chauhanhardik/populo_2,beni55/edx-platform,Endika/edx-platform,martynovp/edx-platform,vikas1885/test1,kmoocdev2/edx-platform,vikas1885/test1,SivilTaram/edx-platform,MSOpenTech/edx-platform,torchingloom/edx-platform,pabloborrego93/edx-platform,msegado/edx-platform,motion2015/a3,hastexo/edx-platform,nttks/edx-platform,DNFcode/edx-platform,TsinghuaX/edx-platform,mushtaqak/edx-platform,BehavioralInsightsTeam/edx-platform,DefyVentures/edx-platform,jamesblunt/edx-platform,Edraak/edx-platform,mtlchun/edx,fly19890211/edx-platform,cognitiveclass/edx-platform,bdero/edx-platform,jelugbo/tundex,yokose-ks/edx-platform,proversity-org/edx-platform,martynovp/edx-platform,appliedx/edx-platform,doismellburning/edx-platform,alu042/edx-platform,valtech-mooc/edx-platform,dkarakats/edx-platform,proversity-org/edx-platform,rational
Agent/edx-platform-custom,MakeHer/edx-platform,unicri/edx-platform,yokose-ks/edx-platform,EduPepperPDTesting/pepper2013-testing,hkawasaki/kawasaki-aio8-0,JioEducation/edx-platform,mcgachey/edx-platform,praveen-pal/edx-platform,xinjiguaike/edx-platform,hkawasaki/kawasaki-aio8-1,kmoocdev/edx-platform,kursitet/edx-platform,vismartltd/edx-platform,inares/edx-platform,apigee/edx-platform,chauhanhardik/populo_2,amir-qayyum-khan/edx-platform,jamesblunt/edx-platform,Softmotions/edx-platform,hkawasaki/kawasaki-aio8-1,morpheby/levelup-by,kalebhartje/schoolboost,LICEF/edx-platform,jolyonb/edx-platform,stvstnfrd/edx-platform,kalebhartje/schoolboost,Livit/Livit.Learn.EdX,miptliot/edx-platform,pabloborrego93/edx-platform,gymnasium/edx-platform,halvertoluke/edx-platform,polimediaupv/edx-platform,hkawasaki/kawasaki-aio8-2,rue89-tech/edx-platform,miptliot/edx-platform,lduarte1991/edx-platform,ferabra/edx-platform,Kalyzee/edx-platform,pdehaye/theming-edx-platform,pepeportela/edx-platform,alu042/edx-platform,kmoocdev2/edx-platform,halvertoluke/edx-platform,playm2mboy/edx-platform,edx/edx-platform,tanmaykm/edx-platform,wwj718/ANALYSE,4eek/edx-platform,xinjiguaike/edx-platform,syjeon/new_edx,Softmotions/edx-platform,ak2703/edx-platform,bdero/edx-platform,EDUlib/edx-platform,ovnicraft/edx-platform,itsjeyd/edx-platform,martynovp/edx-platform,ampax/edx-platform,stvstnfrd/edx-platform,vasyarv/edx-platform,auferack08/edx-platform,y12uc231/edx-platform,zofuthan/edx-platform,chudaol/edx-platform,mcgachey/edx-platform,arifsetiawan/edx-platform,MSOpenTech/edx-platform,EDUlib/edx-platform,lduarte1991/edx-platform,Lektorium-LLC/edx-platform,MakeHer/edx-platform,longmen21/edx-platform,kxliugang/edx-platform,andyzsf/edx,UXE/local-edx,kalebhartje/schoolboost,jazkarta/edx-platform,Endika/edx-platform,DefyVentures/edx-platform,xuxiao19910803/edx-platform,marcore/edx-platform,CourseTalk/edx-platform,longmen21/edx-platform,chand3040/cloud_that,jonathan-beard/edx-platform,rationalAgent/edx-platform-custom
,angelapper/edx-platform,jamesblunt/edx-platform,OmarIthawi/edx-platform,IITBinterns13/edx-platform-dev,beacloudgenius/edx-platform,JCBarahona/edX,abdoosh00/edx-rtl-final,EDUlib/edx-platform,playm2mboy/edx-platform,xuxiao19910803/edx,eduNEXT/edunext-platform,OmarIthawi/edx-platform,zubair-arbi/edx-platform,defance/edx-platform,ahmedaljazzar/edx-platform,nanolearningllc/edx-platform-cypress-2,ovnicraft/edx-platform,UXE/local-edx,CourseTalk/edx-platform,shurihell/testasia,naresh21/synergetics-edx-platform,Endika/edx-platform,MakeHer/edx-platform,nttks/edx-platform,edx/edx-platform,rue89-tech/edx-platform,mcgachey/edx-platform,SivilTaram/edx-platform,marcore/edx-platform,nagyistoce/edx-platform,teltek/edx-platform,MSOpenTech/edx-platform,Lektorium-LLC/edx-platform,Stanford-Online/edx-platform,kursitet/edx-platform,alu042/edx-platform,eestay/edx-platform,unicri/edx-platform,cognitiveclass/edx-platform,Semi-global/edx-platform,ahmadio/edx-platform,utecuy/edx-platform,dsajkl/reqiop,praveen-pal/edx-platform,shubhdev/edxOnBaadal,motion2015/edx-platform,yokose-ks/edx-platform,chauhanhardik/populo,Edraak/circleci-edx-platform,hamzehd/edx-platform,hkawasaki/kawasaki-aio8-2,zhenzhai/edx-platform,apigee/edx-platform,zerobatu/edx-platform,nikolas/edx-platform,adoosii/edx-platform,playm2mboy/edx-platform,fly19890211/edx-platform,ESOedX/edx-platform,simbs/edx-platform,fly19890211/edx-platform,gymnasium/edx-platform,polimediaupv/edx-platform,mushtaqak/edx-platform,wwj718/edx-platform,naresh21/synergetics-edx-platform,B-MOOC/edx-platform,pomegranited/edx-platform,4eek/edx-platform,edx/edx-platform,Semi-global/edx-platform,antoviaque/edx-platform,IONISx/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,dsajkl/123,jbzdak/edx-platform,motion2015/a3,EduPepperPDTesting/pepper2013-testing,Livit/Livit.Learn.EdX,cpennington/edx-platform,analyseuc3m/ANALYSE-v1,franosincic/edx-platform,stvstnfrd/edx-platform,Softmotions/edx-platform,ubc/edx-platform,jruiperezv/ANALYSE,shubhdev/openedx
,chauhanhardik/populo,pabloborrego93/edx-platform,deepsrijit1105/edx-platform,jswope00/griffinx,kalebhartje/schoolboost,mjirayu/sit_academy,doganov/edx-platform,chauhanhardik/populo,kmoocdev2/edx-platform,zerobatu/edx-platform,cpennington/edx-platform,ahmadiga/min_edx,dcosentino/edx-platform,don-github/edx-platform,chauhanhardik/populo,chrisndodge/edx-platform,Edraak/circleci-edx-platform,rismalrv/edx-platform,jonathan-beard/edx-platform,bitifirefly/edx-platform,sudheerchintala/LearnEraPlatForm,cognitiveclass/edx-platform,pomegranited/edx-platform,bitifirefly/edx-platform,morenopc/edx-platform,xuxiao19910803/edx,carsongee/edx-platform,PepperPD/edx-pepper-platform,adoosii/edx-platform,pku9104038/edx-platform,J861449197/edx-platform,ahmadio/edx-platform,eduNEXT/edunext-platform,cpennington/edx-platform,miptliot/edx-platform,motion2015/a3,Edraak/edx-platform,leansoft/edx-platform,mbareta/edx-platform-ft,AkA84/edx-platform,IITBinterns13/edx-platform-dev,jjmiranda/edx-platform,arbrandes/edx-platform,jamiefolsom/edx-platform,zhenzhai/edx-platform,mjg2203/edx-platform-seas,raccoongang/edx-platform,vismartltd/edx-platform,DNFcode/edx-platform,beni55/edx-platform,mahendra-r/edx-platform,martynovp/edx-platform,LearnEra/LearnEraPlaftform,a-parhom/edx-platform,raccoongang/edx-platform,Edraak/edraak-platform,devs1991/test_edx_docmode,motion2015/edx-platform,DefyVentures/edx-platform,mahendra-r/edx-platform,zofuthan/edx-platform,shabab12/edx-platform,leansoft/edx-platform,CredoReference/edx-platform,dsajkl/123,eemirtekin/edx-platform,Shrhawk/edx-platform,peterm-itr/edx-platform,edry/edx-platform,polimediaupv/edx-platform,kursitet/edx-platform,knehez/edx-platform,alexthered/kienhoc-platform,Edraak/circleci-edx-platform,shubhdev/edxOnBaadal,sudheerchintala/LearnEraPlatForm,zerobatu/edx-platform,nttks/jenkins-test,syjeon/new_edx,edry/edx-platform,franosincic/edx-platform,chauhanhardik/populo_2,openfun/edx-platform,ovnicraft/edx-platform,jzoldak/edx-platform,mushtaqak/edx-platform,na
nolearningllc/edx-platform-cypress-2,ESOedX/edx-platform,carsongee/edx-platform,nttks/edx-platform,vasyarv/edx-platform,jazkarta/edx-platform,4eek/edx-platform,ak2703/edx-platform,Ayub-Khan/edx-platform,hastexo/edx-platform,IndonesiaX/edx-platform,arifsetiawan/edx-platform,benpatterson/edx-platform,bigdatauniversity/edx-platform,nagyistoce/edx-platform,rhndg/openedx,andyzsf/edx,openfun/edx-platform,benpatterson/edx-platform,J861449197/edx-platform,nanolearningllc/edx-platform-cypress,halvertoluke/edx-platform,msegado/edx-platform,openfun/edx-platform,mushtaqak/edx-platform,knehez/edx-platform,amir-qayyum-khan/edx-platform,antoviaque/edx-platform,romain-li/edx-platform,jruiperezv/ANALYSE,benpatterson/edx-platform,shubhdev/edxOnBaadal,rismalrv/edx-platform,mjirayu/sit_academy,longmen21/edx-platform,cselis86/edx-platform,jamiefolsom/edx-platform,PepperPD/edx-pepper-platform,marcore/edx-platform,TeachAtTUM/edx-platform,eduNEXT/edx-platform,appsembler/edx-platform,angelapper/edx-platform,waheedahmed/edx-platform,wwj718/edx-platform,Edraak/edx-platform,nagyistoce/edx-platform,procangroup/edx-platform,cselis86/edx-platform,gsehub/edx-platform,eduNEXT/edunext-platform,chauhanhardik/populo,arbrandes/edx-platform,WatanabeYasumasa/edx-platform,doismellburning/edx-platform,olexiim/edx-platform,analyseuc3m/ANALYSE-v1,appliedx/edx-platform,iivic/BoiseStateX,carsongee/edx-platform,jbzdak/edx-platform,mcgachey/edx-platform,kmoocdev/edx-platform,waheedahmed/edx-platform,andyzsf/edx,msegado/edx-platform,chand3040/cloud_that,IONISx/edx-platform,abdoosh00/edx-rtl-final,TsinghuaX/edx-platform,BehavioralInsightsTeam/edx-platform,leansoft/edx-platform,nanolearning/edx-platform,zubair-arbi/edx-platform,devs1991/test_edx_docmode,UOMx/edx-platform,valtech-mooc/edx-platform,nikolas/edx-platform,philanthropy-u/edx-platform,BehavioralInsightsTeam/edx-platform,IONISx/edx-platform,zhenzhai/edx-platform,dsajkl/reqiop,beacloudgenius/edx-platform,mtlchun/edx,cyanna/edx-platform,etzhou/edx-platform,n
anolearning/edx-platform,ampax/edx-platform,pku9104038/edx-platform,doismellburning/edx-platform,jelugbo/tundex,xingyepei/edx-platform,analyseuc3m/ANALYSE-v1,LearnEra/LearnEraPlaftform,zubair-arbi/edx-platform,prarthitm/edxplatform,defance/edx-platform,shubhdev/edx-platform,IITBinterns13/edx-platform-dev,jbzdak/edx-platform,tiagochiavericosta/edx-platform,kalebhartje/schoolboost,torchingloom/edx-platform,nanolearning/edx-platform,don-github/edx-platform,franosincic/edx-platform,romain-li/edx-platform,stvstnfrd/edx-platform,sudheerchintala/LearnEraPlatForm,CourseTalk/edx-platform,jswope00/GAI,nikolas/edx-platform,philanthropy-u/edx-platform,ampax/edx-platform,beni55/edx-platform,shubhdev/edx-platform,yokose-ks/edx-platform,J861449197/edx-platform,IndonesiaX/edx-platform,unicri/edx-platform,ZLLab-Mooc/edx-platform,proversity-org/edx-platform,alexthered/kienhoc-platform,mbareta/edx-platform-ft,4eek/edx-platform,jbassen/edx-platform,TeachAtTUM/edx-platform,eemirtekin/edx-platform,praveen-pal/edx-platform,auferack08/edx-platform,halvertoluke/edx-platform,waheedahmed/edx-platform,DNFcode/edx-platform,ovnicraft/edx-platform,Shrhawk/edx-platform,polimediaupv/edx-platform,fintech-circle/edx-platform,appliedx/edx-platform,nanolearningllc/edx-platform-cypress-2,bitifirefly/edx-platform,B-MOOC/edx-platform,beni55/edx-platform,peterm-itr/edx-platform,shurihell/testasia,alu042/edx-platform,zhenzhai/edx-platform,adoosii/edx-platform,cecep-edu/edx-platform,LICEF/edx-platform,RPI-OPENEDX/edx-platform,shabab12/edx-platform,chudaol/edx-platform,ahmadiga/min_edx,ampax/edx-platform,dcosentino/edx-platform,martynovp/edx-platform,edx-solutions/edx-platform,LICEF/edx-platform,defance/edx-platform,B-MOOC/edx-platform,cecep-edu/edx-platform,chrisndodge/edx-platform,dkarakats/edx-platform,deepsrijit1105/edx-platform,atsolakid/edx-platform,jolyonb/edx-platform,jamiefolsom/edx-platform,kxliugang/edx-platform,valtech-mooc/edx-platform,fly19890211/edx-platform,OmarIthawi/edx-platform,cognitivecla
ss/edx-platform,tiagochiavericosta/edx-platform,vismartltd/edx-platform,zadgroup/edx-platform,kursitet/edx-platform,dsajkl/123,ak2703/edx-platform,jazkarta/edx-platform,alexthered/kienhoc-platform,xuxiao19910803/edx,MSOpenTech/edx-platform,devs1991/test_edx_docmode,a-parhom/edx-platform,cyanna/edx-platform,jazkarta/edx-platform,LICEF/edx-platform,motion2015/a3,utecuy/edx-platform,adoosii/edx-platform,EduPepperPDTesting/pepper2013-testing,jruiperezv/ANALYSE,Edraak/edraak-platform,mjirayu/sit_academy,Semi-global/edx-platform,jelugbo/tundex,ubc/edx-platform,xinjiguaike/edx-platform,ovnicraft/edx-platform,edx-solutions/edx-platform,Livit/Livit.Learn.EdX,jazztpt/edx-platform,teltek/edx-platform,rationalAgent/edx-platform-custom,itsjeyd/edx-platform,utecuy/edx-platform,jelugbo/tundex,morenopc/edx-platform,romain-li/edx-platform,Unow/edx-platform,edx-solutions/edx-platform,shurihell/testasia,deepsrijit1105/edx-platform,SravanthiSinha/edx-platform,rue89-tech/edx-platform,tanmaykm/edx-platform,bigdatauniversity/edx-platform,SravanthiSinha/edx-platform,JioEducation/edx-platform,ESOedX/edx-platform,zofuthan/edx-platform,solashirai/edx-platform,jazztpt/edx-platform,EDUlib/edx-platform,shashank971/edx-platform,vikas1885/test1,nanolearningllc/edx-platform-cypress,shubhdev/edx-platform,shurihell/testasia,mushtaqak/edx-platform,Semi-global/edx-platform,jjmiranda/edx-platform,syjeon/new_edx,pku9104038/edx-platform,UOMx/edx-platform,hkawasaki/kawasaki-aio8-0,zadgroup/edx-platform,procangroup/edx-platform,marcore/edx-platform,ferabra/edx-platform,10clouds/edx-platform,dcosentino/edx-platform,msegado/edx-platform,Lektorium-LLC/edx-platform,zofuthan/edx-platform,abdoosh00/edraak,proversity-org/edx-platform,arbrandes/edx-platform,xingyepei/edx-platform,synergeticsedx/deployment-wipro,jswope00/GAI,miptliot/edx-platform,Kalyzee/edx-platform,jolyonb/edx-platform,simbs/edx-platform,peterm-itr/edx-platform,Edraak/circleci-edx-platform,torchingloom/edx-platform,TeachAtTUM/edx-platform,shubhdev
/openedx,pepeportela/edx-platform,ferabra/edx-platform,jbassen/edx-platform,kamalx/edx-platform,cecep-edu/edx-platform,tiagochiavericosta/edx-platform,pomegranited/edx-platform,pelikanchik/edx-platform,shashank971/edx-platform,benpatterson/edx-platform,wwj718/ANALYSE,iivic/BoiseStateX,kmoocdev2/edx-platform,raccoongang/edx-platform,sameetb-cuelogic/edx-platform-test,eestay/edx-platform,DNFcode/edx-platform,motion2015/edx-platform,jbassen/edx-platform,mjirayu/sit_academy,motion2015/edx-platform,jamesblunt/edx-platform,rismalrv/edx-platform,franosincic/edx-platform,chauhanhardik/populo_2,devs1991/test_edx_docmode,vikas1885/test1,abdoosh00/edraak,pelikanchik/edx-platform,EduPepperPD/pepper2013,vasyarv/edx-platform,RPI-OPENEDX/edx-platform,y12uc231/edx-platform,hamzehd/edx-platform,MakeHer/edx-platform,rue89-tech/edx-platform,xinjiguaike/edx-platform,Ayub-Khan/edx-platform,jazztpt/edx-platform,pdehaye/theming-edx-platform,olexiim/edx-platform,jbassen/edx-platform,gsehub/edx-platform,hastexo/edx-platform,nanolearning/edx-platform,appliedx/edx-platform,nanolearning/edx-platform,olexiim/edx-platform,MakeHer/edx-platform,xuxiao19910803/edx-platform,jswope00/GAI,10clouds/edx-platform,solashirai/edx-platform,don-github/edx-platform,jzoldak/edx-platform,SravanthiSinha/edx-platform,fintech-circle/edx-platform,praveen-pal/edx-platform,philanthropy-u/edx-platform,nttks/jenkins-test,wwj718/edx-platform,ZLLab-Mooc/edx-platform,DefyVentures/edx-platform,prarthitm/edxplatform,rue89-tech/edx-platform,kxliugang/edx-platform,doganov/edx-platform,kmoocdev/edx-platform,dsajkl/reqiop,amir-qayyum-khan/edx-platform,pabloborrego93/edx-platform,mjg2203/edx-platform-seas,hkawasaki/kawasaki-aio8-1,hmcmooc/muddx-platform,mahendra-r/edx-platform,Ayub-Khan/edx-platform,Semi-global/edx-platform,pepeportela/edx-platform,zubair-arbi/edx-platform,teltek/edx-platform,J861449197/edx-platform,doismellburning/edx-platform,ahmadiga/min_edx,zadgroup/edx-platform,playm2mboy/edx-platform,mcgachey/edx-platform,
inares/edx-platform,UOMx/edx-platform,synergeticsedx/deployment-wipro,jswope00/griffinx,jamesblunt/edx-platform,ahmadio/edx-platform,zubair-arbi/edx-platform,jazkarta/edx-platform-for-isc,prarthitm/edxplatform,Edraak/edx-platform,rismalrv/edx-platform,ESOedX/edx-platform,Edraak/circleci-edx-platform,hmcmooc/muddx-platform,bigdatauniversity/edx-platform,solashirai/edx-platform,jbassen/edx-platform,nttks/edx-platform,cognitiveclass/edx-platform,B-MOOC/edx-platform,ampax/edx-platform-backup,mitocw/edx-platform,mtlchun/edx,10clouds/edx-platform,mtlchun/edx,nanolearningllc/edx-platform-cypress,doismellburning/edx-platform,unicri/edx-platform,beni55/edx-platform,eemirtekin/edx-platform,jjmiranda/edx-platform,hmcmooc/muddx-platform,amir-qayyum-khan/edx-platform,shabab12/edx-platform,hkawasaki/kawasaki-aio8-0,adoosii/edx-platform,benpatterson/edx-platform,zhenzhai/edx-platform,jazztpt/edx-platform,devs1991/test_edx_docmode,louyihua/edx-platform,EduPepperPDTesting/pepper2013-testing,TsinghuaX/edx-platform,ahmadio/edx-platform,alexthered/kienhoc-platform,y12uc231/edx-platform,kmoocdev/edx-platform,bdero/edx-platform | ---
+++
@@ -13,6 +13,7 @@
# for a description of entry_points
entry_points={
'xmodule.v1': [
+ "chapter = seq_module:SequenceDescriptor",
"course = seq_module:SequenceDescriptor",
"html = html_module:HtmlModuleDescriptor",
] |
cb5591228408ccd22cfb3df9106d4ecdeac295a9 | nose2/plugins/collect.py | nose2/plugins/collect.py | """
This plugin implements :func:`startTestRun`, setting a test executor
(``event.executeTests``) that just collects tests without executing
them. To do so it calls result.startTest, result.addSuccess and
result.stopTest for each test, without calling the test itself.
"""
from nose2.events import Plugin
from nose2.compat import unittest
__unittest = True
class CollectOnly(Plugin):
    """Collect but don't run tests"""

    # Name of the config-file section this plugin reads its options from.
    configSection = 'collect-only'
    commandLineSwitch = (None, 'collect-only',
                         'Collect and output test names; do not run any tests')
    # True once we are running under the multiprocessing plugin; in that mode
    # the subprocess hook, not startTestRun, installs the collecting executor.
    _mpmode = False

    def registerInSubprocess(self, event):
        """Propagate this plugin class to worker subprocesses (mp mode)."""
        event.pluginClasses.append(self.__class__)
        self._mpmode = True

    def startTestRun(self, event):
        """Replace ``event.executeTests``"""
        if self._mpmode:
            # Workers install the collector themselves via startSubprocess.
            return
        event.executeTests = self.collectTests

    def startSubprocess(self, event):
        """Install the collecting executor inside a worker subprocess."""
        event.executeTests = self.collectTests

    def collectTests(self, suite, result):
        """Collect tests, but don't run them"""
        for test in suite:
            if isinstance(test, unittest.BaseTestSuite):
                # Recurse into nested suites rather than reporting them.
                self.collectTests(test, result)
                continue
            # Report the test as a success without ever invoking it.
            result.startTest(test)
            result.addSuccess(test)
            result.stopTest(test)
| """
This plugin implements :func:`startTestRun`, setting a test executor
(``event.executeTests``) that just collects tests without executing
them. To do so it calls result.startTest, result.addSuccess and
result.stopTest for each test, without calling the test itself.
"""
from nose2.events import Plugin
from nose2.compat import unittest
__unittest = True
class CollectOnly(Plugin):
    """Plugin that reports test names instead of executing the tests."""

    configSection = 'collect-only'
    commandLineSwitch = (None, 'collect-only',
                         'Collect and output test names; do not run any tests')
    # Set when running under the multiprocessing plugin; the subprocess
    # hook then installs the collecting executor instead of startTestRun.
    _mpmode = False

    def registerInSubprocess(self, event):
        """Propagate this plugin class into worker subprocesses."""
        self._mpmode = True
        event.pluginClasses.append(self.__class__)

    def startTestRun(self, event):
        """Swap in the collecting executor (single-process mode only)."""
        if not self._mpmode:
            event.executeTests = self.collectTests

    def startSubprocess(self, event):
        """Swap in the collecting executor inside a worker subprocess."""
        event.executeTests = self.collectTests

    def collectTests(self, suite, result):
        """Walk *suite* recursively, reporting each test without running it."""
        for item in suite:
            if isinstance(item, unittest.BaseTestSuite):
                self.collectTests(item, result)
            else:
                # Mark the test as passed without ever calling it.
                result.startTest(item)
                result.addSuccess(item)
                result.stopTest(item)
| Fix typo "ech" -> "each" | Fix typo "ech" -> "each"
| Python | bsd-2-clause | ptthiem/nose2,ptthiem/nose2 | ---
+++
@@ -2,7 +2,7 @@
This plugin implements :func:`startTestRun`, setting a test executor
(``event.executeTests``) that just collects tests without executing
them. To do so it calls result.startTest, result.addSuccess and
-result.stopTest for ech test, without calling the test itself.
+result.stopTest for each test, without calling the test itself.
"""
from nose2.events import Plugin
from nose2.compat import unittest |
9b5202ef3c27b44e70567b72a0657ae40557c041 | tests/test_cobrapy_compatibility.py | tests/test_cobrapy_compatibility.py | # Copyright 2014 Novo Nordisk Foundation Center for Biosustainability, DTU.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import types
from cobra.test import create_test_model
from cobra.test.unit_tests import CobraTestCase, TestReactions
from cobra.test.flux_analysis import TestCobraFluxAnalysis
from cameo.core.solver_based_model import to_solver_based_model, SolverBasedModel
def setUp(self):
    """Install a cameo SolverBasedModel as the fixture model.

    Injected into the imported cobra test-case classes below so the
    upstream cobrapy test suite exercises cameo's model implementation.
    """
    # Make Model picklable and then load a solver-based version of the
    # stock cobra test model.
    self.model = to_solver_based_model(create_test_model())
    self.model_class = SolverBasedModel
# Patch the imported cobra test-case classes so that each one uses the
# cameo-flavoured setUp defined above in place of its own.
for cls in (CobraTestCase, TestReactions, TestCobraFluxAnalysis):
    cls.setUp = types.MethodType(setUp, cls)
del TestCobraFluxAnalysis.test_single_deletion | # Copyright 2014 Novo Nordisk Foundation Center for Biosustainability, DTU.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import types
from cobra.test import create_test_model
from cobra.test.unit_tests import CobraTestCase, TestReactions
from cobra.test.flux_analysis import TestCobraFluxAnalysis
from cameo.core.solver_based_model import to_solver_based_model, SolverBasedModel
def setUp(self):
# Make Model pickable and then load a solver based version of test_pickle
self.model = to_solver_based_model(create_test_model())
self.model_class = SolverBasedModel
for cls in (CobraTestCase, TestReactions, TestCobraFluxAnalysis):
cls.setUp = types.MethodType(setUp, cls)
del TestCobraFluxAnalysis.test_single_gene_deletion | Remove test_single_gene_deletion (renamed in cobra=0.4.0b1) | Remove test_single_gene_deletion (renamed in cobra=0.4.0b1)
| Python | apache-2.0 | KristianJensen/cameo,biosustain/cameo,biosustain/cameo | ---
+++
@@ -28,4 +28,4 @@
for cls in (CobraTestCase, TestReactions, TestCobraFluxAnalysis):
cls.setUp = types.MethodType(setUp, cls)
-del TestCobraFluxAnalysis.test_single_deletion
+del TestCobraFluxAnalysis.test_single_gene_deletion |
16d99a20088e81045e34999b6045e9222d510cd5 | app.py | app.py | # -*- coding: UTF-8 -*-
"""
trytond_async.celery
Implementation of the celery app
This module is named celery because of the way celery workers lookup
the app when `--proj` argument is passed to the worker. For more details
see the celery documentation at:
http://docs.celeryproject.org/en/latest/getting-started/next-steps.html#about-the-app-argument
"""
from __future__ import absolute_import
import os
from celery import Celery
from trytond.config import config
config.update_etc()
broker_url = config.get('async', 'broker_url')
backend_url = config.get('async', 'backend_url')
app = Celery(
'trytond_async',
broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'),
backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'),
include=['trytond_async.tasks']
)
app.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
CELERY_TASK_SERIALIZER='tryson',
CELERY_RESULT_SERIALIZER='tryson',
CELERY_ACCEPT_CONTENT=[
'application/x-tryson',
'application/x-python-serialize'
]
)
if __name__ == '__main__':
app.start()
| # -*- coding: UTF-8 -*-
"""
trytond_async.celery
Implementation of the celery app
This module is named celery because of the way celery workers lookup
the app when `--proj` argument is passed to the worker. For more details
see the celery documentation at:
http://docs.celeryproject.org/en/latest/getting-started/next-steps.html#about-the-app-argument
"""
from __future__ import absolute_import
import os
from celery import Celery
from trytond.config import config
try:
from raven import Client
from raven.contrib.celery import register_signal
except ImportError:
pass
else:
if os.environ.get('SENTRY_DSN'):
register_signal(Client(os.environ.get('SENTRY_DSN')))
config.update_etc()
broker_url = config.get('async', 'broker_url')
backend_url = config.get('async', 'backend_url')
app = Celery(
'trytond_async',
broker=broker_url or os.environ.get('TRYTOND_ASYNC__BROKER_URL'),
backend=backend_url or os.environ.get('TRYTOND_ASYNC__BACKEND_URL'),
include=['trytond_async.tasks']
)
app.conf.update(
CELERY_TASK_RESULT_EXPIRES=3600,
CELERY_TASK_SERIALIZER='tryson',
CELERY_RESULT_SERIALIZER='tryson',
CELERY_ACCEPT_CONTENT=[
'application/x-tryson',
'application/x-python-serialize'
]
)
if __name__ == '__main__':
app.start()
| Use raven for logging if available | Use raven for logging if available
| Python | bsd-3-clause | fulfilio/trytond-async,tarunbhardwaj/trytond-async | ---
+++
@@ -15,6 +15,15 @@
from celery import Celery
from trytond.config import config
+
+try:
+ from raven import Client
+ from raven.contrib.celery import register_signal
+except ImportError:
+ pass
+else:
+ if os.environ.get('SENTRY_DSN'):
+ register_signal(Client(os.environ.get('SENTRY_DSN')))
config.update_etc()
|
9121c8c074a31fd3668f8281c7f093360ed72988 | salad/cli.py | salad/cli.py | import sys
import argparse
from lettuce.bin import main as lettuce_main
from lettuce import world
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
BROWSER_CHOICES = [browser.lower()
for browser in DesiredCapabilities.__dict__.keys()
if not browser.startswith('_')]
BROWSER_CHOICES.append('zope.testbrowser')
BROWSER_CHOICES.sort()
DEFAULT_BROWSER = 'firefox'
def main(args=sys.argv[1:]):
parser = argparse.ArgumentParser(prog="Salad", description='BDD browswer-automation made tasty.')
parser.add_argument('--browser', default=DEFAULT_BROWSER,
metavar='BROWSER', choices=BROWSER_CHOICES,
help=('Browser to use. Options: %s Default is %s.' %
(BROWSER_CHOICES, DEFAULT_BROWSER)))
parser.add_argument('--remote-url',
help='Selenium server url for remote browsers')
parser.add_argument('args', nargs=argparse.REMAINDER)
parsed_args = parser.parse_args()
world.drivers = [parsed_args.browser]
world.remote_url = parsed_args.remote_url
lettuce_main(args=parsed_args.args)
if __name__ == '__main__':
main()
| import sys
import argparse
from lettuce.bin import main as lettuce_main
from lettuce import world
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
BROWSER_CHOICES = [browser.lower()
for browser in DesiredCapabilities.__dict__.keys()
if not browser.startswith('_')]
BROWSER_CHOICES.append('zope.testbrowser')
BROWSER_CHOICES.sort()
DEFAULT_BROWSER = 'firefox'
def main(args=sys.argv[1:]):
parser = argparse.ArgumentParser(prog="Salad", description='BDD browswer-automation made tasty.')
parser.add_argument('--browser', default=DEFAULT_BROWSER,
metavar='BROWSER', choices=BROWSER_CHOICES,
help=('Browser to use. Options: %s Default is %s.' %
(BROWSER_CHOICES, DEFAULT_BROWSER)))
parser.add_argument('--remote-url',
help='Selenium server url for remote browsers')
(parsed_args, leftovers) = parser.parse_known_args()
world.drivers = [parsed_args.browser]
world.remote_url = parsed_args.remote_url
lettuce_main(args=leftovers)
if __name__ == '__main__':
main()
| Use parse_known_args and pass leftovers to lettuce | Use parse_known_args and pass leftovers to lettuce
Seems to be more reliable at handling weird argument ordering than
REMAINDER was
| Python | bsd-3-clause | salad/salad,salad/salad,beanqueen/salad,beanqueen/salad | ---
+++
@@ -22,12 +22,11 @@
(BROWSER_CHOICES, DEFAULT_BROWSER)))
parser.add_argument('--remote-url',
help='Selenium server url for remote browsers')
- parser.add_argument('args', nargs=argparse.REMAINDER)
- parsed_args = parser.parse_args()
+ (parsed_args, leftovers) = parser.parse_known_args()
world.drivers = [parsed_args.browser]
world.remote_url = parsed_args.remote_url
- lettuce_main(args=parsed_args.args)
+ lettuce_main(args=leftovers)
if __name__ == '__main__':
main() |
c79944c39cb109430839cd92a366ab734aee0f64 | ievv_opensource/ievv_sms/apps.py | ievv_opensource/ievv_sms/apps.py | from django.apps import AppConfig
class ModelSmsAppConfig(AppConfig):
name = 'ievv_opensource.ievv_sms'
verbose_name = "IEVV SMS"
def ready(self):
from ievv_opensource.ievv_sms import sms_registry
from ievv_opensource.ievv_sms.backends import debugprint
registry = sms_registry.Registry.get_instance()
registry.add(debugprint.Backend)
registry.add(debugprint.Latin1Backend)
| from django.apps import AppConfig
class ModelSmsAppConfig(AppConfig):
name = 'ievv_opensource.ievv_sms'
verbose_name = "IEVV SMS"
def ready(self):
from ievv_opensource.ievv_sms import sms_registry
from ievv_opensource.ievv_sms.backends import debugprint
from ievv_opensource.ievv_sms.backends import pswin
registry = sms_registry.Registry.get_instance()
registry.add(debugprint.Backend)
registry.add(debugprint.Latin1Backend)
registry.add(pswin.Backend)
| Add pswin backend to registry. | ievv_sms: Add pswin backend to registry.
| Python | bsd-3-clause | appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource,appressoas/ievv_opensource | ---
+++
@@ -8,6 +8,8 @@
def ready(self):
from ievv_opensource.ievv_sms import sms_registry
from ievv_opensource.ievv_sms.backends import debugprint
+ from ievv_opensource.ievv_sms.backends import pswin
registry = sms_registry.Registry.get_instance()
registry.add(debugprint.Backend)
registry.add(debugprint.Latin1Backend)
+ registry.add(pswin.Backend) |
a9b1d08e2e248b606ef269ebc7e3fb44698d3efc | a_john_shots/__init__.py | a_john_shots/__init__.py | #!/bin/env python
# A-John-Shots - Python module/library for saving Security Hash Algorithms into JSON format.
# Copyright (C) 2017 Funilrys - Nissar Chababy <contact at funilrys dot com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Original Version: https://github.com/funilrys/A-John-Shots
from .core import Core
def get(path, **args):
return Core(path, **args).get()
| #!/bin/env python
# A-John-Shots - Python module/library for saving Security Hash Algorithms into JSON format.
# Copyright (C) 2017 Funilrys - Nissar Chababy <contact at funilrys dot com>
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# Original Version: https://github.com/funilrys/A-John-Shots
from .core import Core
def get(path, **args):
"""
A simple script to get Security Hash Algorithms into JSON format
:param path: A string, the path of the file or the directory we have to return.
:param search: A string, the pattern the file have to match in ordrer to be included in the results
:param output: A bool, Print on screen (False), print on file (True)
:param output_destination: A string, the destination of the results
:param algorithm: A string, the algorithm to use. Possibility: all, sha1, sha224, sha384, sha512
:param exclude: A list, the list of path, filename or in general, a pattern to exclude
"""
return Core(path, **args).get()
| Update licence header + Introduction of docstrings | Update licence header + Introduction of docstrings
| Python | mit | funilrys/A-John-Shots | ---
+++
@@ -3,18 +3,13 @@
# A-John-Shots - Python module/library for saving Security Hash Algorithms into JSON format.
# Copyright (C) 2017 Funilrys - Nissar Chababy <contact at funilrys dot com>
#
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License
-# along with this program. If not, see <http://www.gnu.org/licenses/>.
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+# SOFTWARE.
# Original Version: https://github.com/funilrys/A-John-Shots
@@ -23,4 +18,15 @@
def get(path, **args):
+ """
+ A simple script to get Security Hash Algorithms into JSON format
+
+ :param path: A string, the path of the file or the directory we have to return.
+ :param search: A string, the pattern the file have to match in ordrer to be included in the results
+ :param output: A bool, Print on screen (False), print on file (True)
+ :param output_destination: A string, the destination of the results
+ :param algorithm: A string, the algorithm to use. Possibility: all, sha1, sha224, sha384, sha512
+ :param exclude: A list, the list of path, filename or in general, a pattern to exclude
+ """
+
return Core(path, **args).get() |
21ac10919e4c63e8bb8760e186438e335a8f188c | setup_git.py | setup_git.py | #!/usr/bin/env python
"""Call the main setup_git.py. This should be
copied to the main directory of your project and named setup_git.py."""
import os
import os.path
os.system(os.path.join("tools", "dev_tools", "git", "setup_git.py"))
| #!/usr/bin/env python3
"""Call the main setup_git.py. This should be
copied to the main directory of your project and named setup_git.py."""
import os
import os.path
os.system(os.path.join("tools", "dev_tools", "git", "setup_git.py"))
| Use Python 3 by default | Use Python 3 by default
| Python | apache-2.0 | salilab/rmf,salilab/rmf,salilab/rmf,salilab/rmf | ---
+++
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
"""Call the main setup_git.py. This should be
copied to the main directory of your project and named setup_git.py.""" |
fc740373b772d46c8a0a3bf2cacef503df7e69ad | bokeh/models/mappers.py | bokeh/models/mappers.py | from __future__ import absolute_import
import numpy as np
from six import string_types
from ..plot_object import PlotObject
from ..properties import Any, Float, Color
from .. import palettes
class ColorMapper(PlotObject):
''' Base class for color mapper objects. '''
pass
class LinearColorMapper(ColorMapper):
# TODO (bev) use Array property
palette = Any # Array
low = Float
high = Float
reserve_color = Color("#ffffff") #TODO: What is the color code for transparent???
reserve_val = Float(default=None)
def __init__(self, *args, **kwargs):
pal = args[0] if len(args) > 0 else kwargs.get('palette', [])
if isinstance(pal, string_types):
palette = getattr(palettes, pal, None)
if palette is None:
raise ValueError("Unknown palette name '%s'" % pal)
kwargs['palette'] = np.array(palette)
else:
if not all(isinstance(x, string_types) and x.startswith('#') for x in pal):
raise ValueError("Malformed palette: '%s'" % pal)
kwargs['palette'] = np.array(pal)
super(LinearColorMapper, self).__init__(**kwargs)
def map_from_index(self, indices):
return self.palette[np.array(indices)]
def map_from_value(self, values):
x = np.array(values)
if self.low: low = self.low
else: low = min(values)
if self.high: high = self.high
else: high = max(values)
N = len(self.palette)
scale = N/float(high-low)
offset = -scale*low
indices = np.floor(x*scale+offset).astype('int')
indices[indices==len(self.palette)] -= 1
return self.palette[indices]
def reverse(self):
self.palette = self.palette[::-1]
| from __future__ import absolute_import
import numpy as np
from six import string_types
from ..plot_object import PlotObject
from ..properties import Any, Float, Color
from .. import palettes
class ColorMapper(PlotObject):
''' Base class for color mapper objects. '''
pass
class LinearColorMapper(ColorMapper):
# TODO (bev) use Array property
palette = Any # Array
low = Float
high = Float
reserve_color = Color("#ffffff") #TODO: What is the color code for transparent???
reserve_val = Float(default=None)
def __init__(self, *args, **kwargs):
pal = args[0] if len(args) > 0 else kwargs.get('palette', [])
if isinstance(pal, string_types):
palette = getattr(palettes, pal, None)
if palette is None:
raise ValueError("Unknown palette name '%s'" % pal)
kwargs['palette'] = np.array(palette)
else:
if not all(isinstance(x, string_types) and x.startswith('#') for x in pal):
raise ValueError("Malformed palette: '%s'" % pal)
kwargs['palette'] = np.array(pal)
super(LinearColorMapper, self).__init__(**kwargs)
| Remove useless code from LinearColorMapper | Remove useless code from LinearColorMapper
| Python | bsd-3-clause | timsnyder/bokeh,khkaminska/bokeh,KasperPRasmussen/bokeh,almarklein/bokeh,rhiever/bokeh,matbra/bokeh,schoolie/bokeh,aavanian/bokeh,draperjames/bokeh,srinathv/bokeh,rs2/bokeh,rothnic/bokeh,CrazyGuo/bokeh,evidation-health/bokeh,ericmjl/bokeh,philippjfr/bokeh,stuart-knock/bokeh,DuCorey/bokeh,gpfreitas/bokeh,ptitjano/bokeh,saifrahmed/bokeh,timsnyder/bokeh,carlvlewis/bokeh,saifrahmed/bokeh,schoolie/bokeh,khkaminska/bokeh,eteq/bokeh,htygithub/bokeh,josherick/bokeh,stonebig/bokeh,ChinaQuants/bokeh,ptitjano/bokeh,roxyboy/bokeh,paultcochrane/bokeh,PythonCharmers/bokeh,timsnyder/bokeh,khkaminska/bokeh,rothnic/bokeh,dennisobrien/bokeh,muku42/bokeh,DuCorey/bokeh,ericdill/bokeh,caseyclements/bokeh,jakirkham/bokeh,rhiever/bokeh,tacaswell/bokeh,stonebig/bokeh,jplourenco/bokeh,aiguofer/bokeh,rhiever/bokeh,DuCorey/bokeh,lukebarnard1/bokeh,azjps/bokeh,percyfal/bokeh,josherick/bokeh,daodaoliang/bokeh,carlvlewis/bokeh,clairetang6/bokeh,tacaswell/bokeh,abele/bokeh,lukebarnard1/bokeh,mindriot101/bokeh,timsnyder/bokeh,alan-unravel/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,bokeh/bokeh,clairetang6/bokeh,ChristosChristofidis/bokeh,philippjfr/bokeh,xguse/bokeh,aiguofer/bokeh,draperjames/bokeh,abele/bokeh,roxyboy/bokeh,xguse/bokeh,daodaoliang/bokeh,rothnic/bokeh,birdsarah/bokeh,jplourenco/bokeh,srinathv/bokeh,paultcochrane/bokeh,draperjames/bokeh,percyfal/bokeh,ahmadia/bokeh,caseyclements/bokeh,mindriot101/bokeh,tacaswell/bokeh,schoolie/bokeh,ChristosChristofidis/bokeh,timothydmorton/bokeh,KasperPRasmussen/bokeh,timothydmorton/bokeh,KasperPRasmussen/bokeh,muku42/bokeh,ChristosChristofidis/bokeh,canavandl/bokeh,jplourenco/bokeh,roxyboy/bokeh,caseyclements/bokeh,laurent-george/bokeh,paultcochrane/bokeh,timothydmorton/bokeh,bsipocz/bokeh,laurent-george/bokeh,draperjames/bokeh,satishgoda/bokeh,srinathv/bokeh,dennisobrien/bokeh,quasiben/bokeh,bokeh/bokeh,msarahan/bokeh,satishgoda/bokeh,matbra/bokeh,canavandl/bokeh,jakirkham/bokeh,jakirkham/bokeh,xguse/bokeh,philippjfr/bo
keh,mutirri/bokeh,daodaoliang/bokeh,saifrahmed/bokeh,percyfal/bokeh,ericmjl/bokeh,rs2/bokeh,mutirri/bokeh,khkaminska/bokeh,Karel-van-de-Plassche/bokeh,roxyboy/bokeh,Karel-van-de-Plassche/bokeh,jplourenco/bokeh,msarahan/bokeh,muku42/bokeh,ChristosChristofidis/bokeh,htygithub/bokeh,quasiben/bokeh,maxalbert/bokeh,ahmadia/bokeh,aavanian/bokeh,ChinaQuants/bokeh,stonebig/bokeh,clairetang6/bokeh,canavandl/bokeh,msarahan/bokeh,laurent-george/bokeh,deeplook/bokeh,birdsarah/bokeh,rhiever/bokeh,bokeh/bokeh,justacec/bokeh,caseyclements/bokeh,ericmjl/bokeh,CrazyGuo/bokeh,rs2/bokeh,evidation-health/bokeh,akloster/bokeh,deeplook/bokeh,mindriot101/bokeh,evidation-health/bokeh,PythonCharmers/bokeh,aavanian/bokeh,akloster/bokeh,timothydmorton/bokeh,phobson/bokeh,gpfreitas/bokeh,DuCorey/bokeh,josherick/bokeh,bsipocz/bokeh,percyfal/bokeh,quasiben/bokeh,carlvlewis/bokeh,rs2/bokeh,almarklein/bokeh,ahmadia/bokeh,ahmadia/bokeh,azjps/bokeh,htygithub/bokeh,stonebig/bokeh,timsnyder/bokeh,eteq/bokeh,bokeh/bokeh,abele/bokeh,muku42/bokeh,phobson/bokeh,stuart-knock/bokeh,lukebarnard1/bokeh,eteq/bokeh,saifrahmed/bokeh,phobson/bokeh,dennisobrien/bokeh,ericdill/bokeh,evidation-health/bokeh,KasperPRasmussen/bokeh,htygithub/bokeh,almarklein/bokeh,mindriot101/bokeh,stuart-knock/bokeh,dennisobrien/bokeh,Karel-van-de-Plassche/bokeh,tacaswell/bokeh,canavandl/bokeh,ericdill/bokeh,akloster/bokeh,maxalbert/bokeh,mutirri/bokeh,awanke/bokeh,dennisobrien/bokeh,CrazyGuo/bokeh,schoolie/bokeh,Karel-van-de-Plassche/bokeh,awanke/bokeh,gpfreitas/bokeh,ericmjl/bokeh,maxalbert/bokeh,akloster/bokeh,ptitjano/bokeh,lukebarnard1/bokeh,justacec/bokeh,matbra/bokeh,laurent-george/bokeh,gpfreitas/bokeh,ChinaQuants/bokeh,azjps/bokeh,philippjfr/bokeh,rothnic/bokeh,birdsarah/bokeh,ptitjano/bokeh,alan-unravel/bokeh,eteq/bokeh,ericdill/bokeh,justacec/bokeh,azjps/bokeh,ericmjl/bokeh,aiguofer/bokeh,satishgoda/bokeh,draperjames/bokeh,ChinaQuants/bokeh,aavanian/bokeh,phobson/bokeh,matbra/bokeh,msarahan/bokeh,bokeh/bokeh,daodaoliang/bok
eh,CrazyGuo/bokeh,birdsarah/bokeh,schoolie/bokeh,bsipocz/bokeh,Karel-van-de-Plassche/bokeh,jakirkham/bokeh,ptitjano/bokeh,deeplook/bokeh,deeplook/bokeh,clairetang6/bokeh,mutirri/bokeh,carlvlewis/bokeh,alan-unravel/bokeh,PythonCharmers/bokeh,alan-unravel/bokeh,aavanian/bokeh,paultcochrane/bokeh,awanke/bokeh,aiguofer/bokeh,phobson/bokeh,maxalbert/bokeh,awanke/bokeh,xguse/bokeh,PythonCharmers/bokeh,justacec/bokeh,josherick/bokeh,azjps/bokeh,percyfal/bokeh,satishgoda/bokeh,rs2/bokeh,philippjfr/bokeh,abele/bokeh,stuart-knock/bokeh,bsipocz/bokeh,aiguofer/bokeh,srinathv/bokeh,DuCorey/bokeh | ---
+++
@@ -15,6 +15,7 @@
# TODO (bev) use Array property
palette = Any # Array
+
low = Float
high = Float
@@ -35,26 +36,3 @@
kwargs['palette'] = np.array(pal)
super(LinearColorMapper, self).__init__(**kwargs)
-
- def map_from_index(self, indices):
- return self.palette[np.array(indices)]
-
- def map_from_value(self, values):
- x = np.array(values)
-
- if self.low: low = self.low
- else: low = min(values)
-
- if self.high: high = self.high
- else: high = max(values)
-
- N = len(self.palette)
- scale = N/float(high-low)
- offset = -scale*low
-
- indices = np.floor(x*scale+offset).astype('int')
- indices[indices==len(self.palette)] -= 1
- return self.palette[indices]
-
- def reverse(self):
- self.palette = self.palette[::-1] |
af73cf9c453f975e6287b2e4fd53aab27978f7a8 | app/handlers/__init__.py | app/handlers/__init__.py | __version__ = "2020.11.2"
__versionfull__ = __version__
| __version__ = "2021.3.0"
__versionfull__ = __version__
| Bump app version to 2021.3.0 | Bump app version to 2021.3.0
Signed-off-by: Guillaume Tucker <e7e20b7c30f19d4ca6b81319ef81bf200369d137@collabora.com>
| Python | lgpl-2.1 | kernelci/kernelci-backend,kernelci/kernelci-backend | ---
+++
@@ -1,2 +1,2 @@
-__version__ = "2020.11.2"
+__version__ = "2021.3.0"
__versionfull__ = __version__ |
3ca03031599e2a1673c6349710489938f60f6a4d | rwt/tests/test_launch.py | rwt/tests/test_launch.py | from rwt import launch
def test_with_path(tmpdir, capfd):
params = ['-c', 'import sys; print(sys.path)' ]
launch.with_path(str(tmpdir), params)
out, err = capfd.readouterr()
assert str(tmpdir) in out
| import sys
import subprocess
import textwrap
from rwt import launch
def test_with_path(tmpdir, capfd):
params = ['-c', 'import sys; print(sys.path)']
launch.with_path(str(tmpdir), params)
out, err = capfd.readouterr()
assert str(tmpdir) in out
def test_with_path_overlay(tmpdir, capfd):
params = ['-c', 'import sys; print(sys.path)']
# launch subprocess so as not to overlay the test process
script = textwrap.dedent("""
import rwt.launch
rwt.launch.with_path_overlay({tmpdir!r}, {params!r})
""").strip().replace('\n', '; ').format(tmpdir=str(tmpdir), params=params)
subprocess.Popen([sys.executable, '-c', script]).wait()
out, err = capfd.readouterr()
assert str(tmpdir) in out
| Add test for with_path_overlay also. | Add test for with_path_overlay also.
| Python | mit | jaraco/rwt | ---
+++
@@ -1,8 +1,24 @@
+import sys
+import subprocess
+import textwrap
+
from rwt import launch
def test_with_path(tmpdir, capfd):
- params = ['-c', 'import sys; print(sys.path)' ]
+ params = ['-c', 'import sys; print(sys.path)']
launch.with_path(str(tmpdir), params)
out, err = capfd.readouterr()
assert str(tmpdir) in out
+
+
+def test_with_path_overlay(tmpdir, capfd):
+ params = ['-c', 'import sys; print(sys.path)']
+ # launch subprocess so as not to overlay the test process
+ script = textwrap.dedent("""
+ import rwt.launch
+ rwt.launch.with_path_overlay({tmpdir!r}, {params!r})
+ """).strip().replace('\n', '; ').format(tmpdir=str(tmpdir), params=params)
+ subprocess.Popen([sys.executable, '-c', script]).wait()
+ out, err = capfd.readouterr()
+ assert str(tmpdir) in out |
954dd1a5ebafbe32c092265f5f508158f8b2742d | straight/plugin/loader.py | straight/plugin/loader.py | """Facility to load plugins."""
import sys
import os
from importlib import import_module
class StraightPluginLoader(object):
"""Performs the work of locating and loading straight plugins.
This looks for plugins in every location in the import path.
"""
def _findPluginFilePaths(self, namespace):
already_seen = set()
# Look in each location in the path
for path in sys.path:
# Within this, we want to look for a package for the namespace
namespace_rel_path = namespace.replace(".", os.path.sep)
namespace_path = os.path.join(path, namespace_rel_path)
if os.path.exists(namespace_path):
for possible in os.listdir(namespace_path):
base, ext = os.path.splitext(possible)
if base == '__init__' or ext != '.py':
continue
if base not in already_seen:
already_seen.add(base)
yield os.path.join(namespace, possible)
def _findPluginModules(self, namespace):
for filepath in self._findPluginFilePaths(namespace):
path_segments = list(filepath.split(os.path.sep))
path_segments = [p for p in path_segments if p]
path_segments[-1] = os.path.splitext(path_segments[-1])[0]
import_path = '.'.join(path_segments)
yield import_module(import_path)
def load(self, namespace):
modules = self._findPluginModules(namespace)
return list(modules)
| """Facility to load plugins."""
import sys
import os
from importlib import import_module
class StraightPluginLoader(object):
"""Performs the work of locating and loading straight plugins.
This looks for plugins in every location in the import path.
"""
def _findPluginFilePaths(self, namespace):
already_seen = set()
# Look in each location in the path
for path in sys.path:
# Within this, we want to look for a package for the namespace
namespace_rel_path = namespace.replace(".", os.path.sep)
namespace_path = os.path.join(path, namespace_rel_path)
if os.path.exists(namespace_path):
for possible in os.listdir(namespace_path):
base, ext = os.path.splitext(possible)
if base == '__init__' or ext != '.py':
continue
if base not in already_seen:
already_seen.add(base)
yield os.path.join(namespace, possible)
def _findPluginModules(self, namespace):
for filepath in self._findPluginFilePaths(namespace):
path_segments = list(filepath.split(os.path.sep))
path_segments = [p for p in path_segments if p]
path_segments[-1] = os.path.splitext(path_segments[-1])[0]
import_path = '.'.join(path_segments)
yield import_module(import_path)
def load(self, namespace):
"""Load all modules found in a namespace"""
modules = self._findPluginModules(namespace)
return list(modules)
| Add docstring to load() method | Add docstring to load() method
| Python | mit | ironfroggy/straight.plugin,pombredanne/straight.plugin | ---
+++
@@ -39,6 +39,8 @@
yield import_module(import_path)
def load(self, namespace):
+ """Load all modules found in a namespace"""
+
modules = self._findPluginModules(namespace)
return list(modules) |
be52bd5e578c54a816f8b786da5d8cf22fcc3ca8 | paver_ext/python_requirements.py | paver_ext/python_requirements.py | # ============================================================================
# PAVER EXTENSION/UTILITY: Read PIP requirements files
# ============================================================================
# REQUIRES: paver >= 1.0
# DESCRIPTION:
# Provides some utility functions for paver.
#
# SEE ALSO:
# * http://pypi.python.org/pypi/Paver/
# * http://www.blueskyonmars.com/projects/paver/
# ============================================================================
from paver.easy import error
import os.path
# ----------------------------------------------------------------------------
# UTILS:
# ----------------------------------------------------------------------------
def read_requirements(*filenames):
"""
Read PIP "requirements*.txt" files.
These files contains python package requirements.
:param filenames: List of requirement files to read.
:returns: List of packages/package requirements (list-of-strings).
"""
package_requirements = []
for filename in filenames:
if not os.path.exists(filename):
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
requirements_file = open(filename, "r")
for line in requirements_file.readlines():
line = line.strip()
if not line or line.startswith("#"):
continue #< SKIP: EMPTY-LINE or COMMENT-LINE
package_requirements.append(line)
requirements_file.close()
return package_requirements
| # ============================================================================
# PAVER EXTENSION/UTILITY: Read PIP requirements files
# ============================================================================
# REQUIRES: paver >= 1.0
# REQUIRES: pkg_resources, fulfilled when setuptools or distribute is installed
# DESCRIPTION:
# Provides some utility functions for paver.
#
# SEE ALSO:
# * http://pypi.python.org/pypi/Paver/
# * http://www.blueskyonmars.com/projects/paver/
# ============================================================================
from paver.easy import error
import os.path
import pkg_resources
# ----------------------------------------------------------------------------
# UTILS:
# ----------------------------------------------------------------------------
def read_requirements(*filenames):
"""
Read PIP "requirements*.txt" files.
These files contains python package requirements.
:param filenames: List of requirement files to read.
:returns: List of packages/package requirements (list-of-strings).
"""
package_requirements = []
for filename in filenames:
if not os.path.exists(filename):
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
with open(filename, "r") as f:
requirements = pkg_resources.parse_requirements(f.read())
package_requirements.extend(requirements)
# # -- NORMAL CASE:
# requirements_file = open(filename, "r")
# for line in requirements_file.readlines():
# line = line.strip()
# if not line or line.startswith("#"):
# continue #< SKIP: EMPTY-LINE or COMMENT-LINE
# package_requirements.append(line)
# requirements_file.close()
return package_requirements
| Use pkg_resources.parse_requirements() to simplify parsing. | Use pkg_resources.parse_requirements() to simplify parsing.
| Python | bsd-2-clause | jenisys/behave,jenisys/behave | ---
+++
@@ -2,6 +2,7 @@
# PAVER EXTENSION/UTILITY: Read PIP requirements files
# ============================================================================
# REQUIRES: paver >= 1.0
+# REQUIRES: pkg_resources, fulfilled when setuptools or distribute is installed
# DESCRIPTION:
# Provides some utility functions for paver.
#
@@ -12,6 +13,7 @@
from paver.easy import error
import os.path
+import pkg_resources
# ----------------------------------------------------------------------------
# UTILS:
@@ -30,11 +32,15 @@
error("REQUIREMENT-FILE %s not found" % filename)
continue
# -- NORMAL CASE:
- requirements_file = open(filename, "r")
- for line in requirements_file.readlines():
- line = line.strip()
- if not line or line.startswith("#"):
- continue #< SKIP: EMPTY-LINE or COMMENT-LINE
- package_requirements.append(line)
- requirements_file.close()
+ with open(filename, "r") as f:
+ requirements = pkg_resources.parse_requirements(f.read())
+ package_requirements.extend(requirements)
+# # -- NORMAL CASE:
+# requirements_file = open(filename, "r")
+# for line in requirements_file.readlines():
+# line = line.strip()
+# if not line or line.startswith("#"):
+# continue #< SKIP: EMPTY-LINE or COMMENT-LINE
+# package_requirements.append(line)
+# requirements_file.close()
return package_requirements |
1a0eaa4aaab42fbdc26a4e5ab460d0042fa19509 | frappe/website/doctype/blog_post/test_blog_post.py | frappe/website/doctype/blog_post/test_blog_post.py | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.tests.test_website import set_request
from frappe.website.render import render
class TestBlogPost(unittest.TestCase):
def test_generator_view(self):
pages = frappe.get_all('Blog Post', fields=['name', 'route'],
filters={'published': 1, 'route': ('!=', '')}, limit =1)
set_request(path=pages[0].route)
response = render()
self.assertTrue(response.status_code, 200)
html = response.get_data()
self.assertTrue('<article class="blog-content" itemscope itemtype="http://schema.org/BlogPosting">' in html)
def test_generator_not_found(self):
pages = frappe.get_all('Blog Post', fields=['name', 'route'],
filters={'published': 0}, limit =1)
frappe.db.set_value('Blog Post', pages[0].name, 'route', 'test-route-000')
set_request(path='test-route-000')
response = render()
self.assertTrue(response.status_code, 404)
| # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
import unittest
from frappe.tests.test_website import set_request
from frappe.website.render import render
class TestBlogPost(unittest.TestCase):
def test_generator_view(self):
pages = frappe.get_all('Blog Post', fields=['name', 'route'],
filters={'published': 1, 'route': ('!=', '')}, limit =1)
set_request(path=pages[0].route)
response = render()
self.assertTrue(response.status_code, 200)
html = response.get_data().decode()
self.assertTrue('<article class="blog-content" itemscope itemtype="http://schema.org/BlogPosting">' in html)
def test_generator_not_found(self):
pages = frappe.get_all('Blog Post', fields=['name', 'route'],
filters={'published': 0}, limit =1)
frappe.db.set_value('Blog Post', pages[0].name, 'route', 'test-route-000')
set_request(path='test-route-000')
response = render()
self.assertTrue(response.status_code, 404)
| Convert rendered html to unicode before substring matching | Convert rendered html to unicode before substring matching
| Python | mit | tmimori/frappe,paurosello/frappe,ESS-LLP/frappe,RicardoJohann/frappe,bohlian/frappe,bohlian/frappe,saurabh6790/frappe,mbauskar/frappe,adityahase/frappe,mbauskar/frappe,tundebabzy/frappe,yashodhank/frappe,bohlian/frappe,maxtorete/frappe,StrellaGroup/frappe,adityahase/frappe,frappe/frappe,ESS-LLP/frappe,maxtorete/frappe,mbauskar/frappe,paurosello/frappe,saurabh6790/frappe,almeidapaulopt/frappe,tundebabzy/frappe,vjFaLk/frappe,frappe/frappe,manassolanki/frappe,chdecultot/frappe,tmimori/frappe,manassolanki/frappe,adityahase/frappe,almeidapaulopt/frappe,adityahase/frappe,vjFaLk/frappe,maxtorete/frappe,tmimori/frappe,mhbu50/frappe,mbauskar/frappe,chdecultot/frappe,yashodhank/frappe,neilLasrado/frappe,StrellaGroup/frappe,mhbu50/frappe,vjFaLk/frappe,yashodhank/frappe,StrellaGroup/frappe,frappe/frappe,almeidapaulopt/frappe,manassolanki/frappe,neilLasrado/frappe,chdecultot/frappe,vjFaLk/frappe,paurosello/frappe,maxtorete/frappe,tundebabzy/frappe,tundebabzy/frappe,RicardoJohann/frappe,ESS-LLP/frappe,RicardoJohann/frappe,saurabh6790/frappe,tmimori/frappe,manassolanki/frappe,paurosello/frappe,neilLasrado/frappe,bohlian/frappe,saurabh6790/frappe,ESS-LLP/frappe,chdecultot/frappe,mhbu50/frappe,mhbu50/frappe,yashodhank/frappe,neilLasrado/frappe,RicardoJohann/frappe,almeidapaulopt/frappe | ---
+++
@@ -17,7 +17,7 @@
self.assertTrue(response.status_code, 200)
- html = response.get_data()
+ html = response.get_data().decode()
self.assertTrue('<article class="blog-content" itemscope itemtype="http://schema.org/BlogPosting">' in html)
def test_generator_not_found(self): |
c2ab12f64ba1660c86b205fb15cf222261ac6f6d | gaphor/SysML/requirements/tests/test_connectors.py | gaphor/SysML/requirements/tests/test_connectors.py | import pytest
from gaphor.diagram.tests.fixtures import allow, connect, disconnect
from gaphor.SysML import sysml
from gaphor.SysML.requirements.relationships import (
DeriveReqtItem,
RefineItem,
SatisfyItem,
TraceItem,
VerifyItem,
)
from gaphor.SysML.requirements.requirement import RequirementItem
@pytest.mark.parametrize(
"item_class", [DeriveReqtItem, RefineItem, SatisfyItem, TraceItem, VerifyItem,]
)
def test_relation_allow_connect_disconnect_cycle(diagram, element_factory, item_class):
req1 = element_factory.create(sysml.Requirement)
req2 = element_factory.create(sysml.Requirement)
req_item1 = diagram.create(RequirementItem, subject=req1)
req_item2 = diagram.create(RequirementItem, subject=req2)
relation = diagram.create(item_class)
assert allow(relation, relation.handles()[0], req_item1)
assert allow(relation, relation.handles()[1], req_item2)
connect(relation, relation.handles()[0], req_item1)
connect(relation, relation.handles()[1], req_item2)
assert relation.subject
assert relation.subject.sourceContext is req_item1.subject
assert relation.subject.targetContext is req_item2.subject
disconnect(relation, relation.handles()[0])
assert not relation.subject
| import pytest
from gaphor.diagram.tests.fixtures import allow, connect, disconnect
from gaphor.SysML import sysml
from gaphor.SysML.requirements.relationships import (
DeriveReqtItem,
RefineItem,
SatisfyItem,
TraceItem,
VerifyItem,
)
from gaphor.SysML.requirements.requirement import RequirementItem
@pytest.mark.parametrize(
"item_class", [DeriveReqtItem, RefineItem, SatisfyItem, TraceItem, VerifyItem]
)
def test_relation_allow_connect_disconnect_cycle(diagram, element_factory, item_class):
req1 = element_factory.create(sysml.Requirement)
req2 = element_factory.create(sysml.Requirement)
req_item1 = diagram.create(RequirementItem, subject=req1)
req_item2 = diagram.create(RequirementItem, subject=req2)
relation = diagram.create(item_class)
assert allow(relation, relation.handles()[0], req_item1)
assert allow(relation, relation.handles()[1], req_item2)
connect(relation, relation.handles()[0], req_item1)
connect(relation, relation.handles()[1], req_item2)
assert relation.subject
assert relation.subject.sourceContext is req_item1.subject
assert relation.subject.targetContext is req_item2.subject
disconnect(relation, relation.handles()[0])
assert not relation.subject
| Fix flake8 error for missing whitespace | Fix flake8 error for missing whitespace
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
| Python | lgpl-2.1 | amolenaar/gaphor,amolenaar/gaphor | ---
+++
@@ -13,7 +13,7 @@
@pytest.mark.parametrize(
- "item_class", [DeriveReqtItem, RefineItem, SatisfyItem, TraceItem, VerifyItem,]
+ "item_class", [DeriveReqtItem, RefineItem, SatisfyItem, TraceItem, VerifyItem]
)
def test_relation_allow_connect_disconnect_cycle(diagram, element_factory, item_class):
req1 = element_factory.create(sysml.Requirement) |
dcf11d2d26519cdea10813530d2bde85f8fe8180 | Python/Tests/TestData/DjangoProject/Oar/views.py | Python/Tests/TestData/DjangoProject/Oar/views.py | from django.template import Context, loader
# Create your views here.
from django.http import HttpResponse
from Oar.models import Poll
from django.http import HttpResponse
def main(request):
return HttpResponse('<html><body>Hello world!</body></html>')
def index(request):
latest_poll_list = Poll.objects.all().order_by('-pub_date')[:5]
t = loader.get_template('polls/index.html')
c = Context({
'latest_poll_list': latest_poll_list,
})
return HttpResponse(t.render(c))
def loop(request):
t = loader.get_template('polls/loop.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
def loop_nobom(request):
t = loader.get_template('polls/loop_nobom.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
def loop2(request):
t = loader.get_template('polls/loop2.html')
c = Context({
'colors': ['red', 'blue', 'green']
})
return HttpResponse(t.render(c))
| from django.template import Context, loader
# Create your views here.
from django.http import HttpResponse
from Oar.models import Poll
from django.http import HttpResponse
def main(request):
return HttpResponse('<html><body>Hello world!</body></html>')
def index(request):
latest_poll_list = Poll.objects.all().order_by('-pub_date')[:5]
t = loader.get_template('polls/index.html')
c = {
'latest_poll_list': latest_poll_list,
}
return HttpResponse(t.render(c))
def loop(request):
t = loader.get_template('polls/loop.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
def loop_nobom(request):
t = loader.get_template('polls/loop_nobom.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
def loop2(request):
t = loader.get_template('polls/loop2.html')
c = {
'colors': ['red', 'blue', 'green']
}
return HttpResponse(t.render(c))
| Use a dict instead of Context instance in DjangoDebuggerTests test project, to avoid a TypeError in latest version of Django. | Use a dict instead of Context instance in DjangoDebuggerTests test project, to avoid a TypeError in latest version of Django.
| Python | apache-2.0 | int19h/PTVS,huguesv/PTVS,zooba/PTVS,Microsoft/PTVS,Microsoft/PTVS,zooba/PTVS,Microsoft/PTVS,zooba/PTVS,zooba/PTVS,int19h/PTVS,int19h/PTVS,Microsoft/PTVS,Microsoft/PTVS,Microsoft/PTVS,int19h/PTVS,zooba/PTVS,zooba/PTVS,huguesv/PTVS,huguesv/PTVS,int19h/PTVS,huguesv/PTVS,int19h/PTVS,huguesv/PTVS,huguesv/PTVS | ---
+++
@@ -11,29 +11,29 @@
def index(request):
latest_poll_list = Poll.objects.all().order_by('-pub_date')[:5]
t = loader.get_template('polls/index.html')
- c = Context({
+ c = {
'latest_poll_list': latest_poll_list,
- })
+ }
return HttpResponse(t.render(c))
def loop(request):
t = loader.get_template('polls/loop.html')
- c = Context({
+ c = {
'colors': ['red', 'blue', 'green']
- })
+ }
return HttpResponse(t.render(c))
def loop_nobom(request):
t = loader.get_template('polls/loop_nobom.html')
- c = Context({
+ c = {
'colors': ['red', 'blue', 'green']
- })
+ }
return HttpResponse(t.render(c))
def loop2(request):
t = loader.get_template('polls/loop2.html')
- c = Context({
+ c = {
'colors': ['red', 'blue', 'green']
- })
+ }
return HttpResponse(t.render(c))
|
bb3dfe39075876107fa992a66c16a5566a442d23 | polymorphic_auth/tests.py | polymorphic_auth/tests.py | from django.test import TestCase
# Create your tests here.
| import re
from django.contrib.admin.sites import AdminSite
from django_webtest import WebTest
from django.core.urlresolvers import reverse
from .usertypes.email.models import EmailUser
class TestUserAdminBaseFieldsets(WebTest):
"""
Tests a fix applied to ensure `base_fieldsets` are not
lost in `UserChildAdmin` after calling `get_form()` with
no existing instance (i.e. for a new user).
"""
csrf_checks = False
def setUp(self):
self.site = AdminSite()
self.staff_user = EmailUser.objects.create(
email='staff@test.com',
is_staff=True,
is_active=True,
is_superuser=True,
)
self.staff_user.set_password('abc123')
self.staff_user.save()
def test_user_base_fieldsets(self):
# edit our staff user and capture the form response.
response = self.app.get(
reverse('admin:polymorphic_auth_user_change',
args=(self.staff_user.pk,)),
user=self.staff_user
).maybe_follow(user=self.staff_user)
form1_response = response.form.text
# create a another new user
response = self.app.get(
reverse('admin:polymorphic_auth_user_add'),
user=self.staff_user
).maybe_follow(user=self.staff_user)
form = response.form
form['email'] = 'test@test.com'
form['password1'] = 'testpassword'
form['password2'] = 'testpassword'
form.submit(user=self.staff_user)
# Edit our staff user again and capture the form response.
response = self.app.get(
reverse('admin:polymorphic_auth_user_change',
args=(self.staff_user.pk,)),
user=self.staff_user
)
form2_response = response.form.text
# Rip out fields we expect to differ between the two responses.
form1_response = re.sub(
r'<input name="csrfmiddlewaretoken" (.*?)/>', '', form1_response)
form1_response = re.sub(
r'<input class="vTimeField" (.*?)/>', '', form1_response)
form1_response = re.sub(
r'<input id="initial-id_last_login_1" (.*?)/>', '', form1_response)
form2_response = re.sub(
r'<input name="csrfmiddlewaretoken" (.*?)/>', '', form2_response)
form2_response = re.sub(
r'<input class="vTimeField" (.*?)/>', '', form2_response)
form2_response = re.sub(
r'<input id="initial-id_last_login_1" (.*?)/>', '', form2_response)
# Form output should be identical to the first.
# This will not be the case if the base_fieldsets have been lost.
self.assertEqual(form1_response, form2_response)
| Add regression test for base_fieldsets fix. | Add regression test for base_fieldsets fix.
| Python | mit | ixc/django-polymorphic-auth | ---
+++
@@ -1,3 +1,78 @@
-from django.test import TestCase
+import re
+from django.contrib.admin.sites import AdminSite
+from django_webtest import WebTest
+from django.core.urlresolvers import reverse
+from .usertypes.email.models import EmailUser
-# Create your tests here.
+
+class TestUserAdminBaseFieldsets(WebTest):
+ """
+ Tests a fix applied to ensure `base_fieldsets` are not
+ lost in `UserChildAdmin` after calling `get_form()` with
+ no existing instance (i.e. for a new user).
+ """
+ csrf_checks = False
+
+ def setUp(self):
+ self.site = AdminSite()
+ self.staff_user = EmailUser.objects.create(
+ email='staff@test.com',
+ is_staff=True,
+ is_active=True,
+ is_superuser=True,
+ )
+ self.staff_user.set_password('abc123')
+ self.staff_user.save()
+
+ def test_user_base_fieldsets(self):
+
+ # edit our staff user and capture the form response.
+
+ response = self.app.get(
+ reverse('admin:polymorphic_auth_user_change',
+ args=(self.staff_user.pk,)),
+ user=self.staff_user
+ ).maybe_follow(user=self.staff_user)
+ form1_response = response.form.text
+
+ # create a another new user
+
+ response = self.app.get(
+ reverse('admin:polymorphic_auth_user_add'),
+ user=self.staff_user
+ ).maybe_follow(user=self.staff_user)
+ form = response.form
+ form['email'] = 'test@test.com'
+ form['password1'] = 'testpassword'
+ form['password2'] = 'testpassword'
+ form.submit(user=self.staff_user)
+
+ # Edit our staff user again and capture the form response.
+
+ response = self.app.get(
+ reverse('admin:polymorphic_auth_user_change',
+ args=(self.staff_user.pk,)),
+ user=self.staff_user
+ )
+ form2_response = response.form.text
+
+ # Rip out fields we expect to differ between the two responses.
+
+ form1_response = re.sub(
+ r'<input name="csrfmiddlewaretoken" (.*?)/>', '', form1_response)
+ form1_response = re.sub(
+ r'<input class="vTimeField" (.*?)/>', '', form1_response)
+ form1_response = re.sub(
+ r'<input id="initial-id_last_login_1" (.*?)/>', '', form1_response)
+
+ form2_response = re.sub(
+ r'<input name="csrfmiddlewaretoken" (.*?)/>', '', form2_response)
+ form2_response = re.sub(
+ r'<input class="vTimeField" (.*?)/>', '', form2_response)
+ form2_response = re.sub(
+ r'<input id="initial-id_last_login_1" (.*?)/>', '', form2_response)
+
+ # Form output should be identical to the first.
+ # This will not be the case if the base_fieldsets have been lost.
+
+ self.assertEqual(form1_response, form2_response) |
e46b974de8dee93b9bee08c752b5bf3fc5194ec9 | msmbuilder/project_templates/1-gather-metadata-plot.py | msmbuilder/project_templates/1-gather-metadata-plot.py | """Plot metadata info
{{header}}
"""
from subprocess import run
import numpy as np
import seaborn as sns
from matplotlib import pyplot as plt
from msmbuilder.io import load_meta, render_meta
sns.set_style('ticks')
colors = sns.color_palette()
## Load
meta = load_meta()
## Plot logic
def plot_lengths(ax):
lengths_ns = meta['nframes'] / meta['step_ps'] / 1000
ax.hist(lengths_ns)
ax.set_xlabel("Lenths / ns", fontsize=16)
ax.set_ylabel("Count", fontsize=16)
total_label = ("Total length: {us:.2e}"
.format(us=np.sum(lengths_ns) / 1000))
total_label += r" / $\mathrm{\mu s}$"
ax.annotate(total_label,
xy=(0.05, 0.95),
xycoords='axes fraction',
fontsize=18,
va='top',
)
## Plot
fig, ax = plt.subplots(figsize=(7, 5))
plot_lengths(ax)
fig.tight_layout()
fig.savefig("lengths.pdf")
run(['xdg-open', 'lengths.pdf'])
## Save metadata as html table
render_meta(meta, 'meta.pandas.html')
| """Plot metadata info
{{header}}
"""
from subprocess import run
import numpy as np
import seaborn as sns
from matplotlib import pyplot as plt
from msmbuilder.io import load_meta, render_meta
sns.set_style('ticks')
colors = sns.color_palette()
## Load
meta = load_meta()
## Plot logic
def plot_lengths(ax):
lengths_ns = meta['nframes'] * (meta['step_ps'] / 1000)
ax.hist(lengths_ns)
ax.set_xlabel("Lenths / ns", fontsize=16)
ax.set_ylabel("Count", fontsize=16)
total_label = ("Total length: {us:.2e}"
.format(us=np.sum(lengths_ns) / 1000))
total_label += r" / $\mathrm{\mu s}$"
ax.annotate(total_label,
xy=(0.05, 0.95),
xycoords='axes fraction',
fontsize=18,
va='top',
)
## Plot
fig, ax = plt.subplots(figsize=(7, 5))
plot_lengths(ax)
fig.tight_layout()
fig.savefig("lengths.pdf")
run(['xdg-open', 'lengths.pdf'])
## Save metadata as html table
render_meta(meta, 'meta.pandas.html')
| Fix bug in gather-metadata-plot script | Fix bug in gather-metadata-plot script
| Python | lgpl-2.1 | brookehus/msmbuilder,brookehus/msmbuilder,peastman/msmbuilder,msultan/msmbuilder,msultan/msmbuilder,Eigenstate/msmbuilder,brookehus/msmbuilder,mpharrigan/mixtape,cxhernandez/msmbuilder,cxhernandez/msmbuilder,peastman/msmbuilder,cxhernandez/msmbuilder,msmbuilder/msmbuilder,peastman/msmbuilder,Eigenstate/msmbuilder,Eigenstate/msmbuilder,msmbuilder/msmbuilder,rafwiewiora/msmbuilder,rafwiewiora/msmbuilder,cxhernandez/msmbuilder,msultan/msmbuilder,rafwiewiora/msmbuilder,mpharrigan/mixtape,cxhernandez/msmbuilder,mpharrigan/mixtape,mpharrigan/mixtape,brookehus/msmbuilder,rafwiewiora/msmbuilder,Eigenstate/msmbuilder,mpharrigan/mixtape,peastman/msmbuilder,brookehus/msmbuilder,msultan/msmbuilder,dr-nate/msmbuilder,dr-nate/msmbuilder,dr-nate/msmbuilder,Eigenstate/msmbuilder,rafwiewiora/msmbuilder,msmbuilder/msmbuilder,msultan/msmbuilder,dr-nate/msmbuilder,msmbuilder/msmbuilder,msmbuilder/msmbuilder,dr-nate/msmbuilder,peastman/msmbuilder | ---
+++
@@ -19,7 +19,7 @@
## Plot logic
def plot_lengths(ax):
- lengths_ns = meta['nframes'] / meta['step_ps'] / 1000
+ lengths_ns = meta['nframes'] * (meta['step_ps'] / 1000)
ax.hist(lengths_ns)
ax.set_xlabel("Lenths / ns", fontsize=16)
ax.set_ylabel("Count", fontsize=16) |
de7e29622d6b8f18c25e483bc8bdac629e07964c | trex/urls.py | trex/urls.py | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from trex.views import project
urlpatterns = patterns(
'',
url(r"^$",
TemplateView.as_view(template_name="index.html"),
name="index",
),
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
| # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from django.conf.urls import patterns, url
from django.views.generic import TemplateView
from trex.views import project
urlpatterns = patterns(
'',
url(r"^$",
TemplateView.as_view(template_name="index.html"),
name="index",
),
url(r"^api/1/projects/$",
project.ProjectListCreateAPIView.as_view(),
name="project-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/$",
project.ProjectDetailAPIView.as_view(),
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
name="project-entries-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"),
url(r"^api/1/entries/(?P<pk>[0-9]+)/$",
project.EntryDetailAPIView.as_view(),
name="entry-detail"),
)
| Update name of url mapping for ProjectEntriesListAPIView | Update name of url mapping for ProjectEntriesListAPIView
| Python | mit | bjoernricks/trex,bjoernricks/trex | ---
+++
@@ -24,7 +24,7 @@
name="project-detail"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/entries$",
project.ProjectEntriesListAPIView.as_view(),
- name="project-detail"),
+ name="project-entries-list"),
url(r"^api/1/projects/(?P<pk>[0-9]+)/zeiterfassung/$",
project.ProjectZeiterfassungAPIView.as_view(),
name="project-zeiterfassung"), |
7d81de637288ca694c139b3a7830f6e8ca00aa01 | gargoyle/__init__.py | gargoyle/__init__.py | """
gargoyle
~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from gargoyle.manager import gargoyle
try:
VERSION = __import__('pkg_resources').get_distribution('gargoyle-yplan').version
except Exception, e:
VERSION = 'unknown'
__all__ = ('gargoyle', 'autodiscover', 'VERSION')
def autodiscover():
"""
Auto-discover INSTALLED_APPS' gargoyle modules and fail silently when
not present. This forces an import on them to register any gargoyle bits they
may want.
"""
import copy
from django.conf import settings
from django.utils.importlib import import_module
for app in settings.INSTALLED_APPS:
# Attempt to import the app's gargoyle module.
before_import_registry = copy.copy(gargoyle._registry)
try:
import_module('%s.gargoyle' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
gargoyle._registry = before_import_registry
# load builtins
__import__('gargoyle.builtins')
| """
gargoyle
~~~~~~~~
:copyright: (c) 2010 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from gargoyle.manager import gargoyle
try:
VERSION = __import__('pkg_resources').get_distribution('gargoyle-yplan').version
except Exception, e:
VERSION = 'unknown'
__all__ = ('gargoyle', 'autodiscover', 'VERSION')
def autodiscover():
"""
Auto-discover INSTALLED_APPS' gargoyle modules and fail silently when
not present. This forces an import on them to register any gargoyle bits they
may want.
"""
import copy
from django.conf import settings
from importlib import import_module
for app in settings.INSTALLED_APPS:
# Attempt to import the app's gargoyle module.
before_import_registry = copy.copy(gargoyle._registry)
try:
import_module('%s.gargoyle' % app)
except:
# Reset the model registry to the state before the last import as
# this import will have to reoccur on the next request and this
# could raise NotRegistered and AlreadyRegistered exceptions
gargoyle._registry = before_import_registry
# load builtins
__import__('gargoyle.builtins')
| Switch to using python standard library importlib | Switch to using python standard library importlib
Available in Python 2.7+, which is all that Gargoyle now supports. The Django version is removed in 1.9.
| Python | apache-2.0 | nkovshov/gargoyle,YPlan/gargoyle,nkovshov/gargoyle,YPlan/gargoyle,roverdotcom/gargoyle,nkovshov/gargoyle,YPlan/gargoyle,roverdotcom/gargoyle,roverdotcom/gargoyle | ---
+++
@@ -24,7 +24,8 @@
"""
import copy
from django.conf import settings
- from django.utils.importlib import import_module
+
+ from importlib import import_module
for app in settings.INSTALLED_APPS:
# Attempt to import the app's gargoyle module. |
9baae7a5c633399fe25ca6961e992b50adcd72b4 | jacquard/service/base.py | jacquard/service/base.py | import abc
import copy
import werkzeug.routing
class Endpoint(metaclass=abc.ABCMeta):
@abc.abstractproperty
def url(self):
pass
@abc.abstractclassmethod
def handle(self, **kwargs):
pass
def __call__(self, **kwargs):
return self.handle(**kwargs)
@property
def defaults(self):
return {}
def build_rule(self, name):
return werkzeug.routing.Rule(
self.url,
defaults=self.defaults,
endpoint=self,
)
def bind(self, config, request, reverse):
instance = copy.copy(self)
instance.config = config
instance.request = request
instance.reverse = reverse
return instance
| import abc
import copy
import werkzeug.routing
class Endpoint(metaclass=abc.ABCMeta):
@abc.abstractproperty
def url(self):
pass
@abc.abstractclassmethod
def handle(self, **kwargs):
pass
def __call__(self, **kwargs):
return self.handle(**kwargs)
@property
def defaults(self):
return {}
def build_rule(self, name):
return werkzeug.routing.Rule(
self.url,
defaults=self.defaults,
endpoint=self,
)
def bind(self, config, request, reverse):
instance = copy.copy(self)
instance._config = config
instance._request = request
instance._reverse = reverse
return instance
@property
def config(self):
try:
return self._config
except AttributeError:
raise AttributeError(
"Unbound endpoint: `config` is only available on bound "
"endpoints",
)
@property
def request(self):
try:
return self._request
except AttributeError:
raise AttributeError(
"Unbound endpoint: `request` is only available on bound "
"endpoints",
)
def reverse(self, name, **kwargs):
try:
reverse = self.reverse
except AttributeError:
raise AttributeError(
"Unbound endpoint: `reverse` is only available on bound "
"endpoints",
)
return reverse(name, **kwargs)
| Use a level of indirection for helpful error messages | Use a level of indirection for helpful error messages
| Python | mit | prophile/jacquard,prophile/jacquard | ---
+++
@@ -28,7 +28,37 @@
def bind(self, config, request, reverse):
instance = copy.copy(self)
- instance.config = config
- instance.request = request
- instance.reverse = reverse
+ instance._config = config
+ instance._request = request
+ instance._reverse = reverse
return instance
+
+ @property
+ def config(self):
+ try:
+ return self._config
+ except AttributeError:
+ raise AttributeError(
+ "Unbound endpoint: `config` is only available on bound "
+ "endpoints",
+ )
+
+ @property
+ def request(self):
+ try:
+ return self._request
+ except AttributeError:
+ raise AttributeError(
+ "Unbound endpoint: `request` is only available on bound "
+ "endpoints",
+ )
+
+ def reverse(self, name, **kwargs):
+ try:
+ reverse = self.reverse
+ except AttributeError:
+ raise AttributeError(
+ "Unbound endpoint: `reverse` is only available on bound "
+ "endpoints",
+ )
+ return reverse(name, **kwargs) |
bd56d5fb8e7c312e65e0fb9ea686b6fe2500e724 | registration/__init__.py | registration/__init__.py | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
def get_backend():
"""
Return an instance of the registration backend for use on this
site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
``django.core.exceptions.ImproperlyConfigured`` if the specified
backend cannot be located.
"""
i = settings.REGISTRATION_BACKEND.rfind('.')
module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
try:
mod = import_module(module)
except ImportError, e:
raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
try:
backend_class = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
return backend_class()
| Add utility function for retrieving the active registration backend. | Add utility function for retrieving the active registration backend.
| Python | bsd-3-clause | remarkablerocket/django-mailinglist-registration,remarkablerocket/django-mailinglist-registration | ---
+++
@@ -0,0 +1,23 @@
+from django.conf import settings
+from django.core.exceptions import ImproperlyConfigured
+from django.utils.importlib import import_module
+
+def get_backend():
+ """
+ Return an instance of the registration backend for use on this
+ site, as determined by the ``REGISTRATION_BACKEND`` setting. Raise
+ ``django.core.exceptions.ImproperlyConfigured`` if the specified
+ backend cannot be located.
+
+ """
+ i = settings.REGISTRATION_BACKEND.rfind('.')
+ module, attr = settings.REGISTRATION_BACKEND[:i], settings.REGISTRATION_BACKEND[i+1:]
+ try:
+ mod = import_module(module)
+ except ImportError, e:
+ raise ImproperlyConfigured('Error loading registration backend %s: "%s"' % (module, e))
+ try:
+ backend_class = getattr(mod, attr)
+ except AttributeError:
+ raise ImproperlyConfigured('Module "%s" does not define a registration backend named "%s"' % (module, attr))
+ return backend_class() | |
9c40c070b6c55d6eda2cabf4c1ebe062cebcfe8f | flask_controllers/GameModes.py | flask_controllers/GameModes.py | from flask import request
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from python_cowbull_game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
textonly = request.args.get('textmode', None)
if textonly:
return build_response(
html_status=200,
response_data=GameObject.game_modes,
response_mimetype="application/json"
)
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = GameObject.game_modes
# game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
| from flask import request
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
from Game.GameObject import GameObject
class GameModes(MethodView):
def get(self):
textonly = request.args.get('textmode', None)
if textonly:
return build_response(
html_status=200,
response_data=GameObject.game_modes,
response_mimetype="application/json"
)
digits = GameObject.digits_used
guesses = GameObject.guesses_allowed
game_modes = GameObject.game_modes
# game_modes = [mode for mode in GameObject.digits_used]
return_list = []
for mode in game_modes:
return_list.append(
{
"mode": mode,
"digits": digits[mode],
"guesses": guesses[mode]
}
)
return build_response(
html_status=200,
response_data=return_list,
response_mimetype="application/json"
)
| Update to use inheritance for GameObject | Update to use inheritance for GameObject
| Python | apache-2.0 | dsandersAzure/python_cowbull_server,dsandersAzure/python_cowbull_server | ---
+++
@@ -2,7 +2,7 @@
from flask.views import MethodView
from flask_helpers.build_response import build_response
from flask_helpers.ErrorHandler import ErrorHandler
-from python_cowbull_game.GameObject import GameObject
+from Game.GameObject import GameObject
class GameModes(MethodView): |
a1d9e1ed4ac8b7542b6430f84b2ed9197d45d577 | fireplace/cards/wog/priest.py | fireplace/cards/wog/priest.py | from ..utils import *
##
# Minions
| from ..utils import *
##
# Minions
class OG_234:
"Darkshire Alchemist"
play = Heal(TARGET, 5)
class OG_335:
"Shifting Shade"
deathrattle = Give(CONTROLLER, Copy(RANDOM(ENEMY_DECK)))
##
# Spells
class OG_094:
"Power Word: Tentacles"
play = Buff(TARGET, "OG_094e")
OG_094e = buff(+2, +6)
| Implement Darkshire Alchemist, Shifting Shade, Power Word: Tentacles | Implement Darkshire Alchemist, Shifting Shade, Power Word: Tentacles
| Python | agpl-3.0 | jleclanche/fireplace,NightKev/fireplace,beheh/fireplace | ---
+++
@@ -4,4 +4,21 @@
##
# Minions
+class OG_234:
+ "Darkshire Alchemist"
+ play = Heal(TARGET, 5)
+
+class OG_335:
+ "Shifting Shade"
+ deathrattle = Give(CONTROLLER, Copy(RANDOM(ENEMY_DECK)))
+
+
+##
+# Spells
+
+class OG_094:
+ "Power Word: Tentacles"
+ play = Buff(TARGET, "OG_094e")
+
+OG_094e = buff(+2, +6) |
e3d00e6c3e62e5cfa457529fc2dddfb814382db6 | packages/dcos-integration-test/extra/test_metronome.py | packages/dcos-integration-test/extra/test_metronome.py | def test_metronome(dcos_api_session):
job = {
'description': 'Test Metronome API regressions',
'id': 'test.metronome',
'run': {
'cmd': 'ls',
'docker': {'image': 'busybox:latest'},
'cpus': 1,
'mem': 512,
'user': 'nobody',
'restart': {'policy': 'ON_FAILURE'}
}
}
dcos_api_session.metronome_one_off(job)
| def test_metronome(dcos_api_session):
job = {
'description': 'Test Metronome API regressions',
'id': 'test.metronome',
'run': {
'cmd': 'ls',
'docker': {'image': 'busybox:latest'},
'cpus': 1,
'mem': 512,
'disk' 0,
'user': 'nobody',
'restart': {'policy': 'ON_FAILURE'}
}
}
dcos_api_session.metronome_one_off(job)
| Fix the Metronome integration test | Fix the Metronome integration test
| Python | apache-2.0 | dcos/dcos,amitaekbote/dcos,branden/dcos,mesosphere-mergebot/mergebot-test-dcos,surdy/dcos,mesosphere-mergebot/mergebot-test-dcos,surdy/dcos,kensipe/dcos,mellenburg/dcos,mellenburg/dcos,GoelDeepak/dcos,dcos/dcos,mellenburg/dcos,mesosphere-mergebot/dcos,kensipe/dcos,amitaekbote/dcos,kensipe/dcos,GoelDeepak/dcos,mesosphere-mergebot/dcos,branden/dcos,branden/dcos,mesosphere-mergebot/dcos,mellenburg/dcos,branden/dcos,dcos/dcos,mesosphere-mergebot/mergebot-test-dcos,kensipe/dcos,mesosphere-mergebot/dcos,amitaekbote/dcos,dcos/dcos,dcos/dcos,GoelDeepak/dcos,surdy/dcos,amitaekbote/dcos,surdy/dcos,GoelDeepak/dcos,mesosphere-mergebot/mergebot-test-dcos | ---
+++
@@ -7,6 +7,7 @@
'docker': {'image': 'busybox:latest'},
'cpus': 1,
'mem': 512,
+ 'disk' 0,
'user': 'nobody',
'restart': {'policy': 'ON_FAILURE'}
} |
f57c9643a32cca012fdccac40899c6de38e35af9 | ass/ets/__init__.py | ass/ets/__init__.py |
from bundles import Environment, Assets, Bundle, Manifest
import filters as f
from options import Option, Options, Undefined, dict_getter
|
from bundles import Environment, Assets, Bundle, Manifest
import filters as f
from options import Option, Options, Undefined, dict_getter
from pipeable import worker
| Make @worker available for import on ass.ets. | Make @worker available for import on ass.ets.
| Python | bsd-2-clause | kaste/ass.ets,kaste/ass.ets | ---
+++
@@ -2,4 +2,5 @@
from bundles import Environment, Assets, Bundle, Manifest
import filters as f
from options import Option, Options, Undefined, dict_getter
+from pipeable import worker
|
c1dff6850a0d39c39b0c337f4f5473efb77fc075 | tests/utils/test_forms.py | tests/utils/test_forms.py | import unittest
from app import create_app, db
from app.utils.forms import RedirectForm
class TestRedirectForm(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.redirect_form = RedirectForm()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
def test_is_safe_url(self):
with self.app.test_request_context():
self.assertFalse(self.redirect_form.is_safe_url('http://externalsite.com'))
self.assertTrue(self.redirect_form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
self.assertTrue(self.redirect_form.is_safe_url('safe_internal_link'))
def test_get_redirect_target(self):
with self.app.test_request_context('/?next=http://externalsite.com'):
self.assertIsNone(self.redirect_form.get_redirect_target())
with self.app.test_request_context('/?next=safe_internal_link'):
self.assertEquals(self.redirect_form.get_redirect_target(), 'safe_internal_link')
| import unittest
from app import create_app, db
from app.utils.forms import RedirectForm
class FormTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
self.app_ctx.push()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
self.app_ctx.pop()
class TestRedirectForm(FormTestCase):
def setUp(self):
super().setUp()
self.form = RedirectForm()
def test_is_safe_url(self):
with self.app.test_request_context():
self.assertFalse(self.form.is_safe_url('http://externalsite.com'))
self.assertTrue(self.form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
self.assertTrue(self.form.is_safe_url('safe_internal_link'))
def test_get_redirect_target(self):
with self.app.test_request_context('/?next=http://externalsite.com'):
self.assertIsNone(self.form.get_redirect_target())
with self.app.test_request_context('/?next=safe_internal_link'):
self.assertEquals(self.form.get_redirect_target(), 'safe_internal_link')
| Move setUp and tearDown methods into general FormTestCase class | Move setUp and tearDown methods into general FormTestCase class
| Python | mit | Encrylize/flask-blogger,Encrylize/flask-blogger,Encrylize/flask-blogger | ---
+++
@@ -4,11 +4,10 @@
from app.utils.forms import RedirectForm
-class TestRedirectForm(unittest.TestCase):
+class FormTestCase(unittest.TestCase):
def setUp(self):
self.app = create_app('testing')
self.app_ctx = self.app.app_context()
- self.redirect_form = RedirectForm()
self.app_ctx.push()
db.create_all()
@@ -17,16 +16,22 @@
db.drop_all()
self.app_ctx.pop()
+
+class TestRedirectForm(FormTestCase):
+ def setUp(self):
+ super().setUp()
+ self.form = RedirectForm()
+
def test_is_safe_url(self):
with self.app.test_request_context():
- self.assertFalse(self.redirect_form.is_safe_url('http://externalsite.com'))
- self.assertTrue(self.redirect_form.is_safe_url('http://' + self.app.config[
+ self.assertFalse(self.form.is_safe_url('http://externalsite.com'))
+ self.assertTrue(self.form.is_safe_url('http://' + self.app.config[
'SERVER_NAME']))
- self.assertTrue(self.redirect_form.is_safe_url('safe_internal_link'))
+ self.assertTrue(self.form.is_safe_url('safe_internal_link'))
def test_get_redirect_target(self):
with self.app.test_request_context('/?next=http://externalsite.com'):
- self.assertIsNone(self.redirect_form.get_redirect_target())
+ self.assertIsNone(self.form.get_redirect_target())
with self.app.test_request_context('/?next=safe_internal_link'):
- self.assertEquals(self.redirect_form.get_redirect_target(), 'safe_internal_link')
+ self.assertEquals(self.form.get_redirect_target(), 'safe_internal_link') |
fdd1604ae64d72dc2391abe137adba07da830bcd | imagersite/imager_profile/models.py | imagersite/imager_profile/models.py | """Models."""
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class ImagerProfile(models.Model):
"""Imager Profile Model."""
camera_model = models.CharField(max_length=200)
photography_type = models.TextField()
# friends = models.ManyToManyField('self')
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
def is_active(self):
"""Return if the user can log in."""
return self.user.is_active
class ActiveUserManager(models.Manager):
"""Manager to grab active users."""
def get_query_set(self):
"""Return only active users."""
return super(ActiveUserManager, self).get_query_set().filter(user.is_active())
| """Models."""
from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
class ActiveUserManager(models.Manager):
"""Manager to grab active users."""
def get_query_set(self):
"""Return only active users."""
return super(ActiveUserManager, self).get_query_set().filter(user.is_active)
class ImagerProfile(models.Model):
"""Imager Profile Model."""
camera_model = models.CharField(max_length=200)
photography_type = models.TextField()
# friends = models.ManyToManyField('self')
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
# Need to have models.Manager since we overwrote default with ActiveUser
# Without it, we would have lost reference to 'objects'
objects = models.Manager()
active = ActiveUserManager()
@property
def is_active(self):
"""Return all instances of active ImagerProfile."""
return self.user.is_active
# We control the profile, don't have code for user
# If profile is deleted, user is deleted. We want the opposite.
# How do we do that?
# Idea of Signals (pyramid also has)
# Signals hook into the listener pattern (like event listeners)
# Imager profile exists, and gets removed (handelers.py)
# first arg(sender(class that sent signal), **kwargs)
# Must ensure errors aren't raised. Log problem, do nothing.
# If errors are raised, it will prevent other things from happening
# Must put signal code into a place where Django can execute it.
# in apps.py def ready(self): from imager_profile import handlers (will register handlers)
# In init.py add default_app_config = 'imager_rofile.apps.ImagerProfileConfig'
# now Django knows about handlers
| Add ability to access all 'objects' and only 'active' users | Add ability to access all 'objects' and only 'active' users
| Python | mit | DZwell/django-imager | ---
+++
@@ -1,9 +1,19 @@
"""Models."""
+from __future__ import unicode_literals
+
from django.db import models
from django.contrib.auth.models import User
# Create your models here.
+
+
+class ActiveUserManager(models.Manager):
+ """Manager to grab active users."""
+
+ def get_query_set(self):
+ """Return only active users."""
+ return super(ActiveUserManager, self).get_query_set().filter(user.is_active)
class ImagerProfile(models.Model):
@@ -15,20 +25,33 @@
region = models.CharField(max_length=200)
user = models.OneToOneField(User, unique=True, null=False)
+ # Need to have models.Manager since we overwrote default with ActiveUser
+ # Without it, we would have lost reference to 'objects'
+ objects = models.Manager()
+ active = ActiveUserManager()
+
+ @property
def is_active(self):
- """Return if the user can log in."""
+ """Return all instances of active ImagerProfile."""
return self.user.is_active
-
-
-class ActiveUserManager(models.Manager):
- """Manager to grab active users."""
-
- def get_query_set(self):
- """Return only active users."""
- return super(ActiveUserManager, self).get_query_set().filter(user.is_active())
+
+
+# We control the profile, don't have code for user
+# If profile is deleted, user is deleted. We want the opposite.
+# How do we do that?
+# Idea of Signals (pyramid also has)
+# Signals hook into the listener pattern (like event listeners)
+# Imager profile exists, and gets removed (handelers.py)
+# first arg(sender(class that sent signal), **kwargs)
+# Must ensure errors aren't raised. Log problem, do nothing.
+# If errors are raised, it will prevent other things from happening
+# Must put signal code into a place where Django can execute it.
+# in apps.py def ready(self): from imager_profile import handlers (will register handlers)
+# In init.py add default_app_config = 'imager_rofile.apps.ImagerProfileConfig'
+# now Django knows about handlers |
16e9987e680a6a44acdb14bd7554414dfe261056 | sale_automatic_workflow/models/stock_move.py | sale_automatic_workflow/models/stock_move.py | # -*- coding: utf-8 -*-
# © 2011 Akretion Sébastien BEAU <sebastien.beau@akretion.com>
# © 2013 Camptocamp SA (author: Guewen Baconnier)
# © 2016 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, models
class StockMove(models.Model):
_inherit = 'stock.move'
@api.model
def _prepare_picking_assign(self, move):
values = super(StockMove, self)._prepare_picking_assign(move)
if move.procurement_id.sale_line_id:
sale = move.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
return values
| # -*- coding: utf-8 -*-
# © 2011 Akretion Sébastien BEAU <sebastien.beau@akretion.com>
# © 2013 Camptocamp SA (author: Guewen Baconnier)
# © 2016 Sodexis
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from odoo import api, models
class StockMove(models.Model):
_inherit = 'stock.move'
@api.multi
def _get_new_picking_values(self):
values = super(StockMove, self)._get_new_picking_values()
if self.procurement_id.sale_line_id:
sale = self.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
return values
| Fix API type & method name for picking values | Fix API type & method name for picking values
| Python | agpl-3.0 | kittiu/sale-workflow,kittiu/sale-workflow | ---
+++
@@ -10,10 +10,10 @@
class StockMove(models.Model):
_inherit = 'stock.move'
- @api.model
- def _prepare_picking_assign(self, move):
- values = super(StockMove, self)._prepare_picking_assign(move)
- if move.procurement_id.sale_line_id:
- sale = move.procurement_id.sale_line_id.order_id
+ @api.multi
+ def _get_new_picking_values(self):
+ values = super(StockMove, self)._get_new_picking_values()
+ if self.procurement_id.sale_line_id:
+ sale = self.procurement_id.sale_line_id.order_id
values['workflow_process_id'] = sale.workflow_process_id.id
return values |
5d83c039391b83ce16f7499f463554b89168a55f | cumulusci/cli/logger.py | cumulusci/cli/logger.py | """ CLI logger """
from __future__ import unicode_literals
import logging
import coloredlogs
import requests
def init_logger(log_requests=False):
""" Initialize the logger """
logger = logging.getLogger(__name__.split(".")[0])
for handler in logger.handlers: # pragma: nocover
logger.removeHandler(handler)
formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
if log_requests:
requests.packages.urllib3.add_stderr_logger()
| """ CLI logger """
from __future__ import unicode_literals
import logging
import os
import sys
import requests
import coloredlogs
try:
import colorama
except ImportError:
# coloredlogs only installs colorama on Windows
pass
def init_logger(log_requests=False):
""" Initialize the logger """
logger = logging.getLogger(__name__.split(".")[0])
for handler in logger.handlers: # pragma: nocover
logger.removeHandler(handler)
if os.name == "nt" and "colorama" in sys.modules:
colorama.init()
formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
logger.propagate = False
if log_requests:
requests.packages.urllib3.add_stderr_logger()
| Fix color escapes on Windows | Fix color escapes on Windows
Fixes #813
| Python | bsd-3-clause | SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI | ---
+++
@@ -2,9 +2,18 @@
from __future__ import unicode_literals
import logging
+import os
+import sys
+
+import requests
import coloredlogs
-import requests
+
+try:
+ import colorama
+except ImportError:
+ # coloredlogs only installs colorama on Windows
+ pass
def init_logger(log_requests=False):
@@ -13,6 +22,9 @@
logger = logging.getLogger(__name__.split(".")[0])
for handler in logger.handlers: # pragma: nocover
logger.removeHandler(handler)
+
+ if os.name == "nt" and "colorama" in sys.modules:
+ colorama.init()
formatter = coloredlogs.ColoredFormatter(fmt="%(asctime)s: %(message)s")
handler = logging.StreamHandler() |
c83a680603b83edafe61f6d41b34989c70a4e4ae | clowder/clowder/cli/save_controller.py | clowder/clowder/cli/save_controller.py | from cement.ext.ext_argparse import expose
from clowder.cli.abstract_base_controller import AbstractBaseController
class SaveController(AbstractBaseController):
class Meta:
label = 'save'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Create version of clowder.yaml for current repos'
arguments = [
(['version'], dict(help='version to save', metavar='VERSION'))
]
@expose(help="second-controller default command", hide=True)
def default(self):
print("Inside SecondController.default()")
| import os
import sys
from cement.ext.ext_argparse import expose
import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
from clowder.util.decorators import valid_clowder_yaml_required
from clowder.commands.util import (
validate_groups,
validate_projects_exist
)
from clowder.yaml.saving import save_yaml
class SaveController(AbstractBaseController):
class Meta:
label = 'save'
stacked_on = 'base'
stacked_type = 'nested'
description = 'Create version of clowder.yaml for current repos'
arguments = [
(['version'], dict(help='version to save', metavar='VERSION'))
]
@expose(help="second-controller default command", hide=True)
@valid_clowder_yaml_required
def default(self):
if self.app.pargs.version.lower() == 'default':
print(fmt.save_default_error(self.app.pargs.version))
sys.exit(1)
self.clowder_repo.print_status()
validate_projects_exist(self.clowder)
validate_groups(self.clowder.groups)
version_name = self.app.pargs.version.replace('/', '-') # Replace path separators with dashes
version_dir = os.path.join(self.clowder.root_directory, '.clowder', 'versions', version_name)
_make_dir(version_dir)
yaml_file = os.path.join(version_dir, 'clowder.yaml')
if os.path.exists(yaml_file):
print(fmt.save_version_exists_error(version_name, yaml_file) + '\n')
sys.exit(1)
print(fmt.save_version(version_name, yaml_file))
save_yaml(self.clowder.get_yaml(), yaml_file)
def _make_dir(directory):
"""Make directory if it doesn't exist
:param str directory: Directory path to create
:raise OSError:
"""
if not os.path.exists(directory):
try:
os.makedirs(directory)
except OSError as err:
if err.errno != os.errno.EEXIST:
raise
| Add `clowder save` logic to Cement controller | Add `clowder save` logic to Cement controller
| Python | mit | JrGoodle/clowder,JrGoodle/clowder,JrGoodle/clowder | ---
+++
@@ -1,6 +1,16 @@
+import os
+import sys
+
from cement.ext.ext_argparse import expose
+import clowder.util.formatting as fmt
from clowder.cli.abstract_base_controller import AbstractBaseController
+from clowder.util.decorators import valid_clowder_yaml_required
+from clowder.commands.util import (
+ validate_groups,
+ validate_projects_exist
+)
+from clowder.yaml.saving import save_yaml
class SaveController(AbstractBaseController):
@@ -14,5 +24,39 @@
]
@expose(help="second-controller default command", hide=True)
+ @valid_clowder_yaml_required
def default(self):
- print("Inside SecondController.default()")
+ if self.app.pargs.version.lower() == 'default':
+ print(fmt.save_default_error(self.app.pargs.version))
+ sys.exit(1)
+
+ self.clowder_repo.print_status()
+ validate_projects_exist(self.clowder)
+ validate_groups(self.clowder.groups)
+
+ version_name = self.app.pargs.version.replace('/', '-') # Replace path separators with dashes
+ version_dir = os.path.join(self.clowder.root_directory, '.clowder', 'versions', version_name)
+ _make_dir(version_dir)
+
+ yaml_file = os.path.join(version_dir, 'clowder.yaml')
+ if os.path.exists(yaml_file):
+ print(fmt.save_version_exists_error(version_name, yaml_file) + '\n')
+ sys.exit(1)
+
+ print(fmt.save_version(version_name, yaml_file))
+ save_yaml(self.clowder.get_yaml(), yaml_file)
+
+
+def _make_dir(directory):
+ """Make directory if it doesn't exist
+
+ :param str directory: Directory path to create
+ :raise OSError:
+ """
+
+ if not os.path.exists(directory):
+ try:
+ os.makedirs(directory)
+ except OSError as err:
+ if err.errno != os.errno.EEXIST:
+ raise |
b6ee793158d549f3d04d42ecbeb1c63605d6258f | src/setup.py | src/setup.py | import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
| import distutils.core
import distutils.extension
import Cython.Distutils
import numpy as np
compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
distutils.core.setup(
name='ANN-SoLo', cmdclass={'build_ext': Cython.Distutils.build_ext},
ext_modules=[ext_module], include_dirs=[np.get_include()],
)
| Add C++ compilation flag to ensure deterministic behavior | Add C++ compilation flag to ensure deterministic behavior
More information: https://github.com/spotify/annoy/pull/205
| Python | apache-2.0 | bittremieux/ANN-SoLo,bittremieux/ANN-SoLo | ---
+++
@@ -4,7 +4,7 @@
import Cython.Distutils
import numpy as np
-compile_args = ['-O3', '-march=native', '-ffast-math', '-std=c++14', '-fopenmp']
+compile_args = ['-O3', '-march=native', '-ffast-math', '-fno-associative-math', '-std=c++14', '-fopenmp']
ext_module = distutils.extension.Extension('spectrum_match', ['spectrum_match.pyx', 'SpectrumMatch.cpp'],
language='c++', extra_compile_args=compile_args, extra_link_args=compile_args)
|
ad54fa1eae6ea255f8fc57f3bb7c36d410dab705 | ratemyflight/settings.py | ratemyflight/settings.py |
# Maximum airports to return as JSON.
MAX_AIRPORTS = 100
# Maximum flights to return as JSON.
MAX_FLIGHTS = 100
# Pixel size of gravatar icons.
GRAVATAR_SIZE = 32
|
# Maximum airports to return as JSON.
MAX_AIRPORTS = 100
# Maximum flights to return as JSON.
MAX_FLIGHTS = 10
# Pixel size of gravatar icons.
GRAVATAR_SIZE = 32
| Reduce MAX_FLIGHTS on screen to 10. | Reduce MAX_FLIGHTS on screen to 10.
| Python | bsd-2-clause | stephenmcd/ratemyflight,stephenmcd/ratemyflight | ---
+++
@@ -4,7 +4,7 @@
MAX_AIRPORTS = 100
# Maximum flights to return as JSON.
-MAX_FLIGHTS = 100
+MAX_FLIGHTS = 10
# Pixel size of gravatar icons.
GRAVATAR_SIZE = 32 |
7bece2c307cb05504c4b778446a4867ab8b6c196 | indra/tests/test_ontmapper.py | indra/tests/test_ontmapper.py | from indra.statements import Influence, Concept
from indra.preassembler.ontology_mapper import OntologyMapper, wm_ontomap
def test_map():
c1 = Concept('x', db_refs={'UN': [('entities/x', 1.0)]})
c2 = Concept('y', db_refs={'BBN': 'entities/y'})
c3 = Concept('z')
stmts = [Influence(c1, c3), Influence(c2, c3)]
om = OntologyMapper(stmts)
om.map_statements()
assert len(om.statements) == 2
assert om.statements[0].subj.db_refs['BBN'] == 'entities/y', \
om.statements[0].subj.db_refs
assert om.statements[1].subj.db_refs['UN'] == [('entities/x', 1.0)], \
om.statements[1].subj.db_refs
def test_wm_map():
c1 = Concept('x', db_refs={'UN': [('')]})
om = OntologyMapper(stmts, wm_ontomap) | from indra.statements import Influence, Concept
from indra.preassembler.ontology_mapper import OntologyMapper, wm_ontomap
def test_map():
c1 = Concept('x', db_refs={'UN': [('entities/x', 1.0)]})
c2 = Concept('y', db_refs={'BBN': 'entities/y'})
c3 = Concept('z')
stmts = [Influence(c1, c3), Influence(c2, c3)]
om = OntologyMapper(stmts)
om.map_statements()
assert len(om.statements) == 2
assert om.statements[0].subj.db_refs['BBN'] == 'entities/y', \
om.statements[0].subj.db_refs
assert om.statements[1].subj.db_refs['UN'] == [('entities/x', 1.0)], \
om.statements[1].subj.db_refs
def test_wm_map():
c1 = Concept('x', db_refs={'UN': [('UN/properties/price', 1.0)]})
c2 = Concept('y', db_refs={'UN': [('UN/entities/human/education', 1.0)]})
stmts = [Influence(c1, c2)]
om = OntologyMapper(stmts, wm_ontomap, symmetric=False)
om.map_statements()
stmt = om.statements[0]
assert 'BBN' in stmt.subj.db_refs
assert 'BBN' in stmt.obj.db_refs
assert 'SOFIA' in stmt.subj.db_refs
assert 'SOFIA' in stmt.obj.db_refs
| Add simple test for WM ontology mapping | Add simple test for WM ontology mapping
| Python | bsd-2-clause | pvtodorov/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,bgyori/indra,johnbachman/belpy,sorgerlab/indra,bgyori/indra,pvtodorov/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra,bgyori/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/indra | ---
+++
@@ -17,5 +17,13 @@
def test_wm_map():
- c1 = Concept('x', db_refs={'UN': [('')]})
- om = OntologyMapper(stmts, wm_ontomap)
+ c1 = Concept('x', db_refs={'UN': [('UN/properties/price', 1.0)]})
+ c2 = Concept('y', db_refs={'UN': [('UN/entities/human/education', 1.0)]})
+ stmts = [Influence(c1, c2)]
+ om = OntologyMapper(stmts, wm_ontomap, symmetric=False)
+ om.map_statements()
+ stmt = om.statements[0]
+ assert 'BBN' in stmt.subj.db_refs
+ assert 'BBN' in stmt.obj.db_refs
+ assert 'SOFIA' in stmt.subj.db_refs
+ assert 'SOFIA' in stmt.obj.db_refs |
b8ac4107535910e257de903d89f4879869035710 | ddsc_incron/settings.py | ddsc_incron/settings.py | from __future__ import absolute_import
from ddsc_incron.celery import celery
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
BROKER_URL = celery.conf['BROKER_URL']
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
'rmq': {
'class': 'ddsc_logging.handlers.DDSCHandler',
'formatter': 'verbose',
'level': 'DEBUG',
'broker_url': BROKER_URL,
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
},
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
| from __future__ import absolute_import
from ddsc_incron.celery import celery
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
BROKER_URL = celery.conf['BROKER_URL']
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
'rmq': {
'class': 'ddsc_logging.handlers.DDSCHandler',
'level': 'INFO',
'broker_url': BROKER_URL,
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'INFO',
},
},
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
| Set default log level to INFO | Set default log level to INFO
| Python | mit | ddsc/ddsc-incron | ---
+++
@@ -31,15 +31,14 @@
},
'rmq': {
'class': 'ddsc_logging.handlers.DDSCHandler',
- 'formatter': 'verbose',
- 'level': 'DEBUG',
+ 'level': 'INFO',
'broker_url': BROKER_URL,
},
},
'loggers': {
'': {
'handlers': ['null'],
- 'level': 'DEBUG',
+ 'level': 'INFO',
},
},
} |
1f3164f95f0ce40bac38ac384bf5fdd181ab5fa1 | importlib_metadata/__init__.py | importlib_metadata/__init__.py | from .api import (
Distribution, PackageNotFoundError, distribution, distributions,
entry_points, files, metadata, requires, version)
# Import for installation side-effects.
from . import _hooks # noqa: F401
__all__ = [
'Distribution',
'PackageNotFoundError',
'distribution',
'distributions',
'entry_points',
'files',
'metadata',
'requires',
'version',
]
__version__ = version(__name__)
| from .api import (
Distribution, PackageNotFoundError, distribution, distributions,
entry_points, files, metadata, requires, version)
# Import for installation side-effects.
__import__('importlib_metadata._hooks')
__all__ = [
'Distribution',
'PackageNotFoundError',
'distribution',
'distributions',
'entry_points',
'files',
'metadata',
'requires',
'version',
]
__version__ = version(__name__)
| Use imperative import to avoid lint (import order) and as a good convention when side-effects is the intention. | Use imperative import to avoid lint (import order) and as a good convention when side-effects is the intention.
| Python | apache-2.0 | python/importlib_metadata | ---
+++
@@ -3,7 +3,7 @@
entry_points, files, metadata, requires, version)
# Import for installation side-effects.
-from . import _hooks # noqa: F401
+__import__('importlib_metadata._hooks')
__all__ = [ |
0f966f1622b5b61fc0a385eb13a7d270013104f9 | orchestrator/jinja/filters.py | orchestrator/jinja/filters.py | from future.builtins import ( # noqa
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, filter, map, zip)
import re
__author__ = 'sukrit'
"""
Package that includes custom filters required for totem config processing
"""
USE_FILTERS = ('replace_regex', )
def replace_regex(input_str, find, replace):
"""
Regex replace filter that replaces all occurrences of given regex match
in a given string
:param input_str: Input string on which replacement is to be performed
:type input_str: str
:param find: Regular expression string that needs to be used for
find/replacement
:type find: str
:param replace: Regex replacement string
:type replace: str
:return: Regex replaced string
:rtype: str
"""
return re.sub(find, replace, input)
def apply_filters(env):
"""
Applies filters on jinja env.
:param env: Jinja environment
:return:
"""
for name in USE_FILTERS:
env.filters[name] = globals()[name]
return env
| from future.builtins import ( # noqa
bytes, dict, int, list, object, range, str,
ascii, chr, hex, input, next, oct, open,
pow, round, filter, map, zip)
import re
__author__ = 'sukrit'
"""
Package that includes custom filters required for totem config processing
"""
USE_FILTERS = ('replace_regex', )
def replace_regex(input_str, find, replace):
"""
Regex replace filter that replaces all occurrences of given regex match
in a given string
:param input_str: Input string on which replacement is to be performed
:type input_str: str
:param find: Regular expression string that needs to be used for
find/replacement
:type find: str
:param replace: Regex replacement string
:type replace: str
:return: Regex replaced string
:rtype: str
"""
return re.sub(find, replace, input_str)
def apply_filters(env):
"""
Applies filters on jinja env.
:param env: Jinja environment
:return:
"""
for name in USE_FILTERS:
env.filters[name] = globals()[name]
return env
| Fix variable for input string | Fix variable for input string
| Python | mit | totem/cluster-orchestrator,totem/cluster-orchestrator,totem/cluster-orchestrator | ---
+++
@@ -29,7 +29,7 @@
:return: Regex replaced string
:rtype: str
"""
- return re.sub(find, replace, input)
+ return re.sub(find, replace, input_str)
def apply_filters(env): |
fe92323dfa1067d552abefa60910e758500f0920 | virtool/handlers/files.py | virtool/handlers/files.py | import virtool.file
from virtool.handlers.utils import json_response
async def find(req):
db = req.app["db"]
cursor = db.files.find({"eof": True}, virtool.file.LIST_PROJECTION)
found_count = await cursor.count()
documents = [virtool.file.processor(d) for d in await cursor.to_list(15)]
return json_response({
"documents": documents,
"found_count": found_count
})
| import os
import virtool.file
import virtool.utils
from virtool.handlers.utils import json_response, not_found
async def find(req):
db = req.app["db"]
cursor = db.files.find({"ready": True}, virtool.file.LIST_PROJECTION)
found_count = await cursor.count()
documents = [virtool.file.processor(d) for d in await cursor.to_list(15)]
return json_response({
"documents": documents,
"found_count": found_count
})
async def remove(req):
file_id = req.match_info["file_id"]
file_path = os.path.join(req.app["settings"].get("data_path"), "files", file_id)
delete_result = await req.app["db"].files.delete_one({"_id": file_id})
virtool.utils.rm(file_path)
if delete_result.deleted_count == 0:
return not_found("Document does not exist")
await req.app["dispatcher"].dispatch("files", "remove", [file_id])
return json_response({
"file_id": file_id,
"removed": True
})
| Add uploaded file removal endpoint | Add uploaded file removal endpoint
| Python | mit | igboyes/virtool,virtool/virtool,virtool/virtool,igboyes/virtool | ---
+++
@@ -1,11 +1,14 @@
+import os
+
import virtool.file
-from virtool.handlers.utils import json_response
+import virtool.utils
+from virtool.handlers.utils import json_response, not_found
async def find(req):
db = req.app["db"]
- cursor = db.files.find({"eof": True}, virtool.file.LIST_PROJECTION)
+ cursor = db.files.find({"ready": True}, virtool.file.LIST_PROJECTION)
found_count = await cursor.count()
@@ -15,3 +18,23 @@
"documents": documents,
"found_count": found_count
})
+
+
+async def remove(req):
+ file_id = req.match_info["file_id"]
+
+ file_path = os.path.join(req.app["settings"].get("data_path"), "files", file_id)
+
+ delete_result = await req.app["db"].files.delete_one({"_id": file_id})
+
+ virtool.utils.rm(file_path)
+
+ if delete_result.deleted_count == 0:
+ return not_found("Document does not exist")
+
+ await req.app["dispatcher"].dispatch("files", "remove", [file_id])
+
+ return json_response({
+ "file_id": file_id,
+ "removed": True
+ }) |
be23c953f8f27a8d178022d3ecb44f461100bbc5 | tests/__init__.py | tests/__init__.py | """Tests for running TopoFlow components in CMI."""
import os
def locate_topoflow(cache_dir):
for x in os.listdir(cache_dir):
if x.startswith('topoflow'):
return x
root_dir = '/home/csdms/wmt/topoflow.0'
cache_dir = os.path.join(root_dir, 'cache')
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
| """Tests for running TopoFlow components in CMI."""
import os
def locate_topoflow(cache_dir):
for x in os.listdir(cache_dir):
if x.startswith('topoflow'):
return x
root_dir = '/home/csdms/wmt/topoflow.0'
cache_dir = os.path.join(root_dir, 'cache')
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
# Used by tests for D8 and Erode components.
data_dir = os.path.join(os.path.abspath('..'), 'data')
test_dir = os.path.dirname(__file__)
| Add path to data directory | Add path to data directory
| Python | mit | mdpiper/topoflow-cmi-testing | ---
+++
@@ -13,3 +13,7 @@
topoflow_dir = locate_topoflow(cache_dir)
example_dir = os.path.join(cache_dir, topoflow_dir,
'topoflow', 'examples', 'Treynor_Iowa')
+
+# Used by tests for D8 and Erode components.
+data_dir = os.path.join(os.path.abspath('..'), 'data')
+test_dir = os.path.dirname(__file__) |
96c90af1e3e2c6a35a261db5a67bdfc236983a06 | tests/conftest.py | tests/conftest.py | # -*- coding: utf-8 -*-
'''
General-purpose fixtures for vdirsyncer's testsuite.
'''
import logging
import os
import click_log
from hypothesis import HealthCheck, Verbosity, settings
import pytest
@pytest.fixture(autouse=True)
def setup_logging():
click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)
@pytest.fixture(autouse=True)
def suppress_py2_warning(monkeypatch):
monkeypatch.setattr('vdirsyncer.cli._check_python2', lambda _: None)
try:
import pytest_benchmark
except ImportError:
@pytest.fixture
def benchmark():
return lambda x: x()
else:
del pytest_benchmark
settings.register_profile("ci", settings(
max_examples=1000,
verbosity=Verbosity.verbose,
suppress_health_check=[HealthCheck.too_slow]
))
settings.register_profile("deterministic", settings(
derandomize=True,
))
if os.getenv('DETERMINISTIC_TESTS').lower() == 'true':
settings.load_profile("deterministic")
elif os.getenv('CI').lower() == 'true':
settings.load_profile("ci")
| # -*- coding: utf-8 -*-
'''
General-purpose fixtures for vdirsyncer's testsuite.
'''
import logging
import os
import click_log
from hypothesis import HealthCheck, Verbosity, settings
import pytest
@pytest.fixture(autouse=True)
def setup_logging():
click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)
# XXX: Py2
@pytest.fixture(autouse=True)
def suppress_py2_warning(monkeypatch):
monkeypatch.setattr('vdirsyncer.cli._check_python2', lambda _: None)
try:
import pytest_benchmark
except ImportError:
@pytest.fixture
def benchmark():
return lambda x: x()
else:
del pytest_benchmark
settings.register_profile("ci", settings(
max_examples=1000,
verbosity=Verbosity.verbose,
suppress_health_check=[HealthCheck.too_slow]
))
settings.register_profile("deterministic", settings(
derandomize=True,
))
if os.getenv('DETERMINISTIC_TESTS').lower() == 'true':
settings.load_profile("deterministic")
elif os.getenv('CI').lower() == 'true':
settings.load_profile("ci")
| Add flag about Py2 monkeypatch for tests | Add flag about Py2 monkeypatch for tests
| Python | mit | untitaker/vdirsyncer,hobarrera/vdirsyncer,untitaker/vdirsyncer,hobarrera/vdirsyncer,untitaker/vdirsyncer | ---
+++
@@ -17,6 +17,7 @@
click_log.basic_config('vdirsyncer').setLevel(logging.DEBUG)
+# XXX: Py2
@pytest.fixture(autouse=True)
def suppress_py2_warning(monkeypatch):
monkeypatch.setattr('vdirsyncer.cli._check_python2', lambda _: None) |
ef82619995ce71b5877228988bce2830eb25baff | download_and_unzip_files.py | download_and_unzip_files.py | import os
import datetime
current_year = datetime.datetime.now().year
years_with_data = range(2011, current_year + 1)
remote_path = "https://ssl.netfile.com/pub2/excel/COAKBrowsable/"
for year in years_with_data:
print "Downloading " + str(year) + " data..."
filename_for_year = "efile_newest_COAK_" + str(year) + ".zip"
os.system("wget " + remote_path + filename_for_year)
os.system("unzip " + filename_for_year)
os.system("rm " + filename_for_year)
| import os
import datetime
current_year = datetime.datetime.now().year
years_with_data = range(2011, current_year + 1)
remote_path = "https://ssl.netfile.com/pub2/excel/COAKBrowsable/"
for year in years_with_data:
print "Downloading " + str(year) + " data..."
filename_for_year = "efile_newest_COAK_" + str(year) + ".zip"
os.system("curl -f -L -O " + remote_path + filename_for_year)
os.system("unzip " + filename_for_year)
os.system("rm " + filename_for_year)
| Replace wget with curl to avoid Heroku buildpack sadness | Replace wget with curl to avoid Heroku buildpack sadness
| Python | bsd-3-clause | daguar/netfile-etl,daguar/netfile-etl | ---
+++
@@ -8,6 +8,6 @@
for year in years_with_data:
print "Downloading " + str(year) + " data..."
filename_for_year = "efile_newest_COAK_" + str(year) + ".zip"
- os.system("wget " + remote_path + filename_for_year)
+ os.system("curl -f -L -O " + remote_path + filename_for_year)
os.system("unzip " + filename_for_year)
os.system("rm " + filename_for_year) |
0c9bf270a7a2d8a4184f644bbe8a50995e155b0a | buddy/error.py | buddy/error.py | from functools import wraps
import botocore.exceptions
from click import ClickException
EXC_TO_ECHO = [
botocore.exceptions.NoRegionError,
]
def handle_exception(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exc:
if exc.__class__ in EXC_TO_ECHO:
msg = '%s: %s' % (exc.__class__, exc)
raise ClickException(msg)
raise
return wrapper
| from functools import wraps
import botocore.exceptions
from click import ClickException
EXC_TO_ECHO = [
botocore.exceptions.NoRegionError,
botocore.exceptions.ParamValidationError,
]
def handle_exception(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as exc:
if exc.__class__ in EXC_TO_ECHO:
msg = '%s: %s' % (exc.__class__, exc)
raise ClickException(msg)
raise
return wrapper
| Add boto ParamValidationError to exc list | Add boto ParamValidationError to exc list
| Python | mit | pior/buddy | ---
+++
@@ -7,6 +7,7 @@
EXC_TO_ECHO = [
botocore.exceptions.NoRegionError,
+ botocore.exceptions.ParamValidationError,
]
|
f1e516e8002425f5f4f9904096848798b2bc97fa | jesusmtnez/python/kata/game.py | jesusmtnez/python/kata/game.py | class Game():
def __init__(self):
self._rolls = [0] * 21
self._current_roll = 0
def roll(self, pins):
self._rolls[self._current_roll] += pins
self._current_roll += 1
def score(self):
score = 0
for frame in range(0, 20, 2):
if self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
else:
score += self._frame_score(frame)
return score
def _is_spare(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] == 10
def _frame_score(self, frame):
return self._rolls[frame] + self._rolls[frame + 1]
| class Game():
def __init__(self):
self._rolls = [0] * 21
self._current_roll = 0
def roll(self, pins):
self._rolls[self._current_roll] += pins
self._current_roll += 1 if pins < 10 else 2
def score(self):
score = 0
for frame in range(0, 20, 2):
if self._is_strike(frame):
score += 10 + self._rolls[frame + 2] + self._rolls[frame + 3]
elif self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
else:
score += self._frame_score(frame)
return score
def _is_spare(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] == 10
def _is_strike(self, frame):
print(frame)
return self._rolls[frame] == 10
def _frame_score(self, frame):
return self._rolls[frame] + self._rolls[frame + 1]
| Add strikes support when rolling | [Python] Add strikes support when rolling
| Python | mit | JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge | ---
+++
@@ -5,12 +5,14 @@
def roll(self, pins):
self._rolls[self._current_roll] += pins
- self._current_roll += 1
+ self._current_roll += 1 if pins < 10 else 2
def score(self):
score = 0
for frame in range(0, 20, 2):
- if self._is_spare(frame):
+ if self._is_strike(frame):
+ score += 10 + self._rolls[frame + 2] + self._rolls[frame + 3]
+ elif self._is_spare(frame):
score += 10 + self._rolls[frame + 2]
else:
score += self._frame_score(frame)
@@ -19,5 +21,9 @@
def _is_spare(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] == 10
+ def _is_strike(self, frame):
+ print(frame)
+ return self._rolls[frame] == 10
+
def _frame_score(self, frame):
return self._rolls[frame] + self._rolls[frame + 1] |
d957301018fa47ce61fcb004880ecd5acd18f2a9 | features/events/utils.py | features/events/utils.py | from django.utils import timezone
def get_requested_time(request):
query = request.GET
month, year = query.get('month', None), query.get('year', None)
if month and year:
return timezone.datetime(year=int(year), month=int(month), day=1)
else:
return None
| from django.utils import timezone
def get_requested_time(request):
query = request.GET
month, year = query.get('month', None), query.get('year', None)
if month and year:
try:
return timezone.datetime(year=int(year), month=int(month), day=1)
except ValueError:
pass
return None
| Fix handling of invalid parameters | Fix handling of invalid parameters
| Python | agpl-3.0 | stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten | ---
+++
@@ -6,6 +6,8 @@
month, year = query.get('month', None), query.get('year', None)
if month and year:
- return timezone.datetime(year=int(year), month=int(month), day=1)
- else:
- return None
+ try:
+ return timezone.datetime(year=int(year), month=int(month), day=1)
+ except ValueError:
+ pass
+ return None |
d6929fa152bb8149fa9b4033135441030dd71260 | authentic2/idp/idp_openid/context_processors.py | authentic2/idp/idp_openid/context_processors.py | def get_url():
return reverse('openid-provider-xrds')
def openid_meta(request):
context = {
'openid_server': context['request'].build_absolute_uri(get_url())
}
content = '''<meta http-equiv="X-XRDS-Location" content="%(openid_server)s"/>
<meta http-equiv="X-YADIS-Location" content="%(openid_server)s" />
''' % context
return { 'openid_meta': context }
| from django.core.urlresolvers import reverse
def get_url():
return reverse('openid-provider-xrds')
def openid_meta(request):
context = {
'openid_server': request.build_absolute_uri(get_url())
}
content = '''<meta http-equiv="X-XRDS-Location" content="%(openid_server)s"/>
<meta http-equiv="X-YADIS-Location" content="%(openid_server)s" />
''' % context
return { 'openid_meta': content }
| Remove dependency on openid in the base template (bis) | Remove dependency on openid in the base template (bis)
Fixes #1357
| Python | agpl-3.0 | BryceLohr/authentic,BryceLohr/authentic,incuna/authentic,incuna/authentic,adieu/authentic2,incuna/authentic,adieu/authentic2,adieu/authentic2,BryceLohr/authentic,pu239ppy/authentic2,adieu/authentic2,pu239ppy/authentic2,BryceLohr/authentic,pu239ppy/authentic2,incuna/authentic,pu239ppy/authentic2,incuna/authentic | ---
+++
@@ -1,11 +1,13 @@
+from django.core.urlresolvers import reverse
+
def get_url():
return reverse('openid-provider-xrds')
def openid_meta(request):
context = {
- 'openid_server': context['request'].build_absolute_uri(get_url())
+ 'openid_server': request.build_absolute_uri(get_url())
}
content = '''<meta http-equiv="X-XRDS-Location" content="%(openid_server)s"/>
<meta http-equiv="X-YADIS-Location" content="%(openid_server)s" />
''' % context
- return { 'openid_meta': context }
+ return { 'openid_meta': content } |
e62469c3572cf9bfa02cd153becc1b36ecf8b3df | run-hooks.py | run-hooks.py | # -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
A demostration of a simple API powered by Eve REST API.
The live demo is available at eve-demo.herokuapp.com. Please keep in mind
that the it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def piterpy(endpoint, response):
for document in response['_items']:
document['PITERPY'] = 'IS SO COOL!'
app = Eve()
app.on_fetched_resource += piterpy
if __name__ == '__main__':
app.run()
| # -*- coding: utf-8 -*-
"""
Eve Demo
~~~~~~~~
A demostration of a simple API powered by Eve REST API.
The live demo is available at eve-demo.herokuapp.com. Please keep in mind
that the it is running on Heroku's free tier using a free MongoHQ
sandbox, which means that the first request to the service will probably
be slow. The database gets a reset every now and then.
:copyright: (c) 2016 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from eve import Eve
def codemotion(endpoint, response):
for document in response['_items']:
document['CODEMOTION'] = 'IS SO FREAKING COOL!'
app = Eve()
app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run()
| Prepare for Codemotion Rome 2017 demo | Prepare for Codemotion Rome 2017 demo
| Python | bsd-3-clause | nicolaiarocci/eve-demo | ---
+++
@@ -18,12 +18,12 @@
from eve import Eve
-def piterpy(endpoint, response):
+def codemotion(endpoint, response):
for document in response['_items']:
- document['PITERPY'] = 'IS SO COOL!'
+ document['CODEMOTION'] = 'IS SO FREAKING COOL!'
app = Eve()
-app.on_fetched_resource += piterpy
+app.on_fetched_resource += codemotion
if __name__ == '__main__':
app.run() |
8f1e094d8abc8317d8e802c556e695d117f0fac1 | globus_cli/commands/task/event_list.py | globus_cli/commands/task/event_list.py | import click
from globus_cli.parsing import common_options, task_id_arg
from globus_cli.safeio import formatted_print
from globus_cli.services.transfer import iterable_response_to_dict, get_client
@click.command('event-list', help='List Events for a given task')
@common_options
@task_id_arg
@click.option(
"--limit", default=10, show_default=True, help="Limit number of results.")
@click.option(
"--filter-errors", is_flag=True, help="Filter results to errors")
@click.option(
"--filter-non-errors", is_flag=True, help="Filter results to non errors")
def task_event_list(task_id, limit, filter_errors, filter_non_errors):
"""
Executor for `globus task-event-list`
"""
client = get_client()
# set filter based on filter flags, if both set do nothing
filter_string = None
if filter_errors and not filter_non_errors:
filter_string = "is_error:1"
if filter_non_errors and not filter_errors:
filter_string = "is_error:1"
event_iterator = client.task_event_list(
task_id, num_results=limit, filter=filter_string)
formatted_print(event_iterator,
fields=(('Time', 'time'), ('Code', 'code'),
('Is Error', 'is_error'), ('Details', 'details')),
json_converter=iterable_response_to_dict)
| import click
from globus_cli.parsing import common_options, task_id_arg
from globus_cli.safeio import formatted_print
from globus_cli.services.transfer import iterable_response_to_dict, get_client
@click.command('event-list', help='List Events for a given task')
@common_options
@task_id_arg
@click.option(
"--limit", default=10, show_default=True, help="Limit number of results.")
@click.option(
"--filter-errors", is_flag=True, help="Filter results to errors")
@click.option(
"--filter-non-errors", is_flag=True, help="Filter results to non errors")
def task_event_list(task_id, limit, filter_errors, filter_non_errors):
"""
Executor for `globus task-event-list`
"""
client = get_client()
# cannot filter by both errors and non errors
if filter_errors and filter_non_errors:
raise click.UsageError("Cannot filter by both errors and non errors")
elif filter_errors:
filter_string = "is_error:1"
elif filter_non_errors:
filter_string = "is_error:0"
else:
filter_string = ""
event_iterator = client.task_event_list(
task_id, num_results=limit, filter=filter_string)
formatted_print(event_iterator,
fields=(('Time', 'time'), ('Code', 'code'),
('Is Error', 'is_error'), ('Details', 'details')),
json_converter=iterable_response_to_dict)
| Make task event-list filters mutually exclusive | Make task event-list filters mutually exclusive
| Python | apache-2.0 | globus/globus-cli,globus/globus-cli | ---
+++
@@ -21,12 +21,18 @@
"""
client = get_client()
- # set filter based on filter flags, if both set do nothing
- filter_string = None
- if filter_errors and not filter_non_errors:
+ # cannot filter by both errors and non errors
+ if filter_errors and filter_non_errors:
+ raise click.UsageError("Cannot filter by both errors and non errors")
+
+ elif filter_errors:
filter_string = "is_error:1"
- if filter_non_errors and not filter_errors:
- filter_string = "is_error:1"
+
+ elif filter_non_errors:
+ filter_string = "is_error:0"
+
+ else:
+ filter_string = ""
event_iterator = client.task_event_list(
task_id, num_results=limit, filter=filter_string) |
3b9a86bdb85aa04c2f2a0e40387f56d65fb54d46 | bin/trigger_upload.py | bin/trigger_upload.py | #!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
compose_meta = {'compose_id': compose_id}
fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta,
push_notifications=push_notifications)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
| #!/bin/env python
# -*- coding: utf8 -*-
""" Triggers an upload process with the specified raw.xz URL. """
import argparse
import logging
import logging.config
import multiprocessing.pool
import fedmsg.config
import fedimg.uploader
logging.config.dictConfig(fedmsg.config.load_config()['logging'])
log = logging.getLogger('fedmsg')
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
fedimg.uploader.upload(upload_pool, [url],
compose_id=compose_id,
push_notifications=push_notifications)
def get_args():
parser = argparse.ArgumentParser(
description="Trigger a manual upload process with the "
"specified raw.xz URL")
parser.add_argument(
"-u", "--url", type=str, help=".raw.xz URL", required=True)
parser.add_argument(
"-c", "--compose-id", type=str, help="compose id of the .raw.xz file",
required=True)
parser.add_argument(
"-p", "--push-notifications",
help="Bool to check if we need to push fedmsg notifications",
action="store_true", required=False)
args = parser.parse_args()
return args.url, args.compose_id, args.push_notifications
def main():
url, compose_id, push_notifications = get_args()
trigger_upload(url, compose_id, push_notifications)
if __name__ == '__main__':
main()
| Fix the script to send proper format of compose id | scripts: Fix the script to send proper format of compose id
Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
| Python | agpl-3.0 | fedora-infra/fedimg,fedora-infra/fedimg | ---
+++
@@ -16,9 +16,9 @@
def trigger_upload(compose_id, url, push_notifications):
upload_pool = multiprocessing.pool.ThreadPool(processes=4)
- compose_meta = {'compose_id': compose_id}
- fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta,
- push_notifications=push_notifications)
+ fedimg.uploader.upload(upload_pool, [url],
+ compose_id=compose_id,
+ push_notifications=push_notifications)
def get_args(): |
b03bc28da7476ca27e64b8cc01b685e11eb6d505 | menpodetect/pico/conversion.py | menpodetect/pico/conversion.py | from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
y, x = fitting.center
radius = fitting.diameter / 2.0
return PointDirectedGraph(np.array(((y, x),
(y + radius, x),
(y + radius, x + radius),
(y, x + radius))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
| from menpo.shape import PointDirectedGraph
import numpy as np
def pointgraph_from_circle(fitting):
diameter = fitting.diameter
radius = diameter / 2.0
y, x = fitting.center
y -= radius
x -= radius
return PointDirectedGraph(np.array(((y, x),
(y + diameter, x),
(y + diameter, x + diameter),
(y, x + diameter))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]]))
| Fix the circle to rectangle code | Fix the circle to rectangle code
Was totally incorrect previously
| Python | bsd-3-clause | jabooth/menpodetect,yuxiang-zhou/menpodetect,yuxiang-zhou/menpodetect,jabooth/menpodetect | ---
+++
@@ -3,10 +3,13 @@
def pointgraph_from_circle(fitting):
+ diameter = fitting.diameter
+ radius = diameter / 2.0
y, x = fitting.center
- radius = fitting.diameter / 2.0
+ y -= radius
+ x -= radius
return PointDirectedGraph(np.array(((y, x),
- (y + radius, x),
- (y + radius, x + radius),
- (y, x + radius))),
+ (y + diameter, x),
+ (y + diameter, x + diameter),
+ (y, x + diameter))),
np.array([[0, 1], [1, 2], [2, 3], [3, 0]])) |
bec268ef554e6f30c2cecd52ecddcafc34c5b0db | tutorials/cmake_python_wrapper/v1/python/foo/__init__.py | tutorials/cmake_python_wrapper/v1/python/foo/__init__.py | import ctypes
import numpy as np
import os
__all__ = ['square']
lib = ctypes.cdll.LoadLibrary("libfoo.so")
lib.square.restype = ctypes.c_int
lib.square.argtypes = [ctypes.c_int]
def square(value):
"""
Parameters
----------
value: int
Returns
--------
value square
"""
return lib.square(value)
| import ctypes
import numpy as np
import os
import sys
__all__ = ['square']
_path = os.path.dirname(__file__)
libname = None
if sys.platform.startswith('linux'):
libname = 'libfoo.so'
elif sys.platform == 'darwin':
libname = 'libfoo.dylib'
elif sys.platform.startswith('win'):
libname = 'foo.dll'
if libname ==None:
print("Unknow platform", sys.platform)
else:
lib = ctypes.CDLL(libname)
lib.square.restype = ctypes.c_int
lib.square.argtypes = [ctypes.c_int]
def square(value):
"""
Parameters
----------
value: int
Returns
--------
value square
"""
return lib.square(value)
| Change to cmake to 3.4 and test sys.platform to choose lib extension to resolve import error on MacOSX | Change to cmake to 3.4 and test sys.platform to choose lib extension to resolve import error on MacOSX
| Python | bsd-3-clause | gammapy/PyGamma15,gammapy/2015-MPIK-Workshop,gammapy/2015-MPIK-Workshop,gammapy/PyGamma15,gammapy/PyGamma15,gammapy/2015-MPIK-Workshop | ---
+++
@@ -1,23 +1,37 @@
import ctypes
import numpy as np
import os
+import sys
__all__ = ['square']
-lib = ctypes.cdll.LoadLibrary("libfoo.so")
-lib.square.restype = ctypes.c_int
-lib.square.argtypes = [ctypes.c_int]
+_path = os.path.dirname(__file__)
+
+libname = None
+if sys.platform.startswith('linux'):
+ libname = 'libfoo.so'
+elif sys.platform == 'darwin':
+ libname = 'libfoo.dylib'
+elif sys.platform.startswith('win'):
+ libname = 'foo.dll'
+if libname ==None:
+ print("Unknow platform", sys.platform)
+
+else:
+ lib = ctypes.CDLL(libname)
+
+ lib.square.restype = ctypes.c_int
+ lib.square.argtypes = [ctypes.c_int]
-def square(value):
- """
- Parameters
- ----------
- value: int
+ def square(value):
+ """
+ Parameters
+ ----------
+ value: int
- Returns
- --------
- value square
- """
- return lib.square(value)
-
+ Returns
+ --------
+ value square
+ """
+ return lib.square(value) |
def92e47ce05497e6a83cd8b8a569113956c6dc2 | events/serializers.py | events/serializers.py | from .models import Event, EventActivity
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
class Meta(object):
model = Event
depth = 1
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
| from .models import Event, EventActivity
from rest_framework import serializers
class EventSerializer(serializers.ModelSerializer):
class Meta(object):
model = Event
depth = 1
fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "is_upcoming", "location")
class EventSimpleSerializer(serializers.Serializer):
pk = serializers.IntegerField()
name = serializers.CharField(max_length=100)
image = serializers.CharField(allow_blank=True, required=False)
datetime = serializers.DateTimeField(required=False)
address = serializers.CharField(allow_blank=True, required=False)
description = serializers.CharField(allow_blank=True, required=False)
is_registered = serializers.BooleanField()
class EventActivitySerializer(serializers.ModelSerializer):
class Meta(object):
model = EventActivity
| Add explicit fields on EventSerializer | Add explicit fields on EventSerializer
| Python | apache-2.0 | belatrix/BackendAllStars | ---
+++
@@ -6,6 +6,7 @@
class Meta(object):
model = Event
depth = 1
+ fields = ("pk", "name", "image", "datetime", "address", "description", "is_active", "is_upcoming", "location")
class EventSimpleSerializer(serializers.Serializer): |
512ab0b23963a3876b1747a1e5995cac9ce18c5d | tests/core/test_timers.py | tests/core/test_timers.py | # Module: test_timers
# Date: 10th February 2010
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Timers Tests"""
import pytest
from datetime import datetime, timedelta
from circuits import Event, Component, Timer
def pytest_funcarg__app(request):
return request.cached_setup(
setup=lambda: setupapp(request),
teardown=lambda app: teardownapp(app),
scope="module"
)
def setupapp(request):
app = App()
app.start()
return app
def teardownapp(app):
app.stop()
class Test(Event):
"""Test Event"""
class App(Component):
flag = False
def reset(self):
self.flag = False
def test(self):
self.flag = True
def test_timer(app):
timer = Timer(0.1, Test(), "timer")
timer.register(app)
assert pytest.wait_for(app, "flag")
app.reset()
def test_persistentTimer(app):
timer = Timer(0.1, Test(), "timer", persist=True)
timer.register(app)
for i in range(2):
assert pytest.wait_for(app, "flag")
app.reset()
timer.unregister()
def test_datetime(app):
now = datetime.now()
d = now + timedelta(seconds=0.1)
timer = Timer(d, Test(), "timer")
timer.register(app)
assert pytest.wait_for(app, "flag")
app.reset()
| # Module: test_timers
# Date: 10th February 2010
# Author: James Mills, prologic at shortcircuit dot net dot au
"""Timers Tests"""
import pytest
from datetime import datetime, timedelta
from circuits import Event, Component, Timer
def pytest_funcarg__app(request):
return request.cached_setup(
setup=lambda: setupapp(request),
teardown=lambda app: teardownapp(app),
scope="module"
)
def setupapp(request):
app = App()
app.start()
return app
def teardownapp(app):
app.stop()
class Test(Event):
"""Test Event"""
class App(Component):
flag = False
def reset(self):
self.flag = False
def test(self):
self.flag = True
def test_timer(app):
timer = Timer(0.1, Test(), "timer")
timer.register(app)
assert pytest.wait_for(app, "flag")
app.reset()
def test_persistentTimer(app):
timer = Timer(0.2, Test(), "timer", persist=True)
timer.register(app)
for i in range(2):
assert pytest.wait_for(app, "flag")
app.reset()
timer.unregister()
def test_datetime(app):
now = datetime.now()
d = now + timedelta(seconds=0.1)
timer = Timer(d, Test(), "timer")
timer.register(app)
assert pytest.wait_for(app, "flag")
app.reset()
| Test fails on shining panda py32 only. May be a race condition (wait_for using same loop interval as timer interval). Checking in for testing. | Test fails on shining panda py32 only. May be a race condition (wait_for using same loop interval as timer interval). Checking in for testing.
| Python | mit | treemo/circuits,nizox/circuits,eriol/circuits,treemo/circuits,eriol/circuits,treemo/circuits,eriol/circuits | ---
+++
@@ -52,7 +52,7 @@
def test_persistentTimer(app):
- timer = Timer(0.1, Test(), "timer", persist=True)
+ timer = Timer(0.2, Test(), "timer", persist=True)
timer.register(app)
for i in range(2): |
16b663441c0d994b02e68b8c785ec6c7a2805f03 | onepercentclub/settings/payments.py | onepercentclub/settings/payments.py | from bluebottle.payments_docdata.settings import DOCDATA_SETTINGS, DOCDATA_PAYMENT_METHODS
PAYMENT_METHODS = DOCDATA_PAYMENT_METHODS
VAT_RATE = 0.21
| from bluebottle.payments_docdata.settings import DOCDATA_SETTINGS
PAYMENT_METHODS = (
{
'provider': 'docdata',
'id': 'docdata-ideal',
'profile': 'ideal',
'name': 'iDEAL',
'restricted_countries': ('NL', 'Netherlands'),
'supports_recurring': False,
},
{
'provider': 'docdata',
'id': 'docdata-directdebit',
'profile': 'directdebit',
'name': 'Direct Debit',
'supports_recurring': True,
},
{
'provider': 'docdata',
'id': 'docdata-creditcard',
'profile': 'creditcard',
'name': 'CreditCard',
'supports_recurring': False,
},
# {
# 'provider': 'docdata',
# 'id': 'docdata-paypal',
# 'profile': 'paypal',
# 'name': 'Paypal',
# 'supports_recurring': False,
# },
)
VAT_RATE = 0.21
| Disable Paypal, up Direct debit | Disable Paypal, up Direct debit
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site | ---
+++
@@ -1,4 +1,34 @@
-from bluebottle.payments_docdata.settings import DOCDATA_SETTINGS, DOCDATA_PAYMENT_METHODS
+from bluebottle.payments_docdata.settings import DOCDATA_SETTINGS
-PAYMENT_METHODS = DOCDATA_PAYMENT_METHODS
+PAYMENT_METHODS = (
+ {
+ 'provider': 'docdata',
+ 'id': 'docdata-ideal',
+ 'profile': 'ideal',
+ 'name': 'iDEAL',
+ 'restricted_countries': ('NL', 'Netherlands'),
+ 'supports_recurring': False,
+ },
+ {
+ 'provider': 'docdata',
+ 'id': 'docdata-directdebit',
+ 'profile': 'directdebit',
+ 'name': 'Direct Debit',
+ 'supports_recurring': True,
+ },
+ {
+ 'provider': 'docdata',
+ 'id': 'docdata-creditcard',
+ 'profile': 'creditcard',
+ 'name': 'CreditCard',
+ 'supports_recurring': False,
+ },
+ # {
+ # 'provider': 'docdata',
+ # 'id': 'docdata-paypal',
+ # 'profile': 'paypal',
+ # 'name': 'Paypal',
+ # 'supports_recurring': False,
+ # },
+)
VAT_RATE = 0.21 |
460460cb95906a71f66a969ab0f2e9d5285b4d21 | kokki/cookbooks/aws/recipes/default.py | kokki/cookbooks/aws/recipes/default.py |
import os
from kokki import *
# Package("python-boto")
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format is necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
|
import os
from kokki import *
# Package("python-boto")
Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
# Mount volumes and format is necessary
for vol in env.config.aws.volumes:
env.cookbooks.aws.EBSVolume(vol['volume_id'],
availability_zone = env.config.aws.availability_zone,
device = vol['device'],
action = "attach")
if vol.get('fstype'):
if vol['fstype'] == "xfs":
Package("xfsprogs")
Execute("mkfs.%(fstype)s -f %(device)s" % vol,
not_if = """if [ "`file -s %(device)s`" = "%(device)s: data" ]; then exit 1; fi""" % vol)
if vol.get('mount_point'):
Mount(vol['mount_point'],
device = vol['device'],
fstype = vol.get('fstype'),
options = vol.get('fsoptions', ["noatime"]),
action = ["mount", "enable"])
| Update boto on the server | Update boto on the server
| Python | bsd-3-clause | samuel/kokki | ---
+++
@@ -3,10 +3,10 @@
from kokki import *
# Package("python-boto")
+Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
+ only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
Execute("pip install git+http://github.com/boto/boto.git#egg=boto",
not_if = 'python -c "import boto"')
-Execute("mv /usr/lib/pymodules/python2.6/boto /tmp/boto.orig",
- only_if = lambda:os.path.exists("/usr/lib/pymodules/python2.6/boto"))
# Mount volumes and format is necessary
|
8e513a0d690bbd6db72d6745cc0f423355648cd4 | mysite/profile/management/commands/profile_hourly_tasks.py | mysite/profile/management/commands/profile_hourly_tasks.py | import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
| import datetime
import logging
from django.core.management.base import BaseCommand
import mysite.profile.tasks
import mysite.search.models
import mysite.search.tasks
## FIXME: Move to a search management command?
def periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch():
logging.info("Checking if bug epoch eclipsed the cached search epoch")
cache_time = mysite.search.models.Epoch.get_for_string('search_cache')
bug_time = mysite.search.models.Epoch.get_for_string('search_cache')
if cache_time < bug_time:
mysite.search.tasks.clear_search_cache()
mysite.search.models.Epoch.bump_for_string('search_cache')
logging.info("Finished dealing with bug epoch vs. cached search epoch.")
class Command(BaseCommand):
help = "Run this once hourly for the OpenHatch profile app."
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
mysite.profile.tasks.fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0:
periodically_check_if_bug_epoch_eclipsed_the_cached_search_epoch()
| Revert "Remove apparently superfluous call to fill_recommended_bugs_cache." | Revert "Remove apparently superfluous call to fill_recommended_bugs_cache."
This reverts commit 83ca8575e68fe9b2b59431e73e94b3247e3485d4.
| Python | agpl-3.0 | onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,campbe13/openhatch,eeshangarg/oh-mainline,openhatch/oh-mainline,waseem18/oh-mainline,campbe13/openhatch,vipul-sharma20/oh-mainline,jledbetter/openhatch,mzdaniel/oh-mainline,willingc/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,moijes12/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,Changaco/oh-mainline,Changaco/oh-mainline,willingc/oh-mainline,waseem18/oh-mainline,jledbetter/openhatch,sudheesh001/oh-mainline,jledbetter/openhatch,vipul-sharma20/oh-mainline,eeshangarg/oh-mainline,jledbetter/openhatch,ojengwa/oh-mainline,campbe13/openhatch,Changaco/oh-mainline,campbe13/openhatch,jledbetter/openhatch,ojengwa/oh-mainline,ojengwa/oh-mainline,Changaco/oh-mainline,ojengwa/oh-mainline,SnappleCap/oh-mainline,SnappleCap/oh-mainline,SnappleCap/oh-mainline,waseem18/oh-mainline,willingc/oh-mainline,campbe13/openhatch,openhatch/oh-mainline,moijes12/oh-mainline,moijes12/oh-mainline,onceuponatimeforever/oh-mainline,openhatch/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,moijes12/oh-mainline,mzdaniel/oh-mainline,mzdaniel/oh-mainline,onceuponatimeforever/oh-mainline,ehashman/oh-mainline,nirmeshk/oh-mainline,ehashman/oh-mainline,SnappleCap/oh-mainline,SnappleCap/oh-mainline,openhatch/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,ehashman/oh-mainline,nirmeshk/oh-mainline,vipul-sharma20/oh-mainline,sudheesh001/oh-mainline,heeraj123/oh-mainline,heeraj123/oh-mainline,Changaco/oh-mainline,moijes12/oh-mainline,heeraj123/oh-mainline,waseem18/oh-mainline,waseem18/oh-mainline,heeraj123/oh-mainline,nirmeshk/oh-mainline,willingc/oh-mainline,eeshangarg/oh-mainline,vipul-sharma20/oh-mainline,nirmeshk/oh-mainline,vipul-sharma20/oh-mainline,heeraj123/oh-mainline,ehashman/oh-mainline,willingc/oh-mainline,openhatch/oh-mainline,sudheesh001/oh-mainline,ojengwa/oh-mainline,eeshangarg/oh-mainline | ---
+++
@@ -23,6 +23,7 @@
def handle(self, *args, **options):
mysite.profile.tasks.sync_bug_epoch_from_model_then_fill_recommended_bugs_cache()
+ mysite.profile.tasks.fill_recommended_bugs_cache()
# Every 4 hours, clear search cache
if (datetime.datetime.utcnow().hour % 4) == 0: |
b92c8145ac8073d2fd0c91446f8011ca2265708a | Winium/TestApp.Test/py-functional/config_silverlight.py | Winium/TestApp.Test/py-functional/config_silverlight.py | # coding: utf-8
import os
BUILD_CONFIG = os.environ.get('BUILD_CONFIG', 'Debug')
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
AUT_PATH = r"..\TestApp.Silverlight\Bin\x86\{0}\TestApp_{0}_x86.xap".format(BUILD_CONFIG)
DESIRED_CAPABILITIES = {
"deviceName": "Emulator 8.1",
"app": os.path.abspath(os.path.join(BASE_DIR, AUT_PATH)),
}
| # coding: utf-8
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
CONFIG_IDENTIFIER = 'Release' if os.getenv('REMOTE_RUN') else 'Debug'
AUT_PATH = r"..\TestApp.Silverlight\Bin\{0}\TestApp_{0}_AnyCPU.xap".format(CONFIG_IDENTIFIER)
DESIRED_CAPABILITIES = {
"deviceName": "Emulator 8.1",
"app": os.path.abspath(os.path.join(BASE_DIR, AUT_PATH)),
}
| Fix app path for functional tests for silverlight | Fix app path for functional tests for silverlight
| Python | mpl-2.0 | 2gis/Winium.StoreApps,2gis/Winium.StoreApps | ---
+++
@@ -1,10 +1,9 @@
# coding: utf-8
import os
-BUILD_CONFIG = os.environ.get('BUILD_CONFIG', 'Debug')
-
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
-AUT_PATH = r"..\TestApp.Silverlight\Bin\x86\{0}\TestApp_{0}_x86.xap".format(BUILD_CONFIG)
+CONFIG_IDENTIFIER = 'Release' if os.getenv('REMOTE_RUN') else 'Debug'
+AUT_PATH = r"..\TestApp.Silverlight\Bin\{0}\TestApp_{0}_AnyCPU.xap".format(CONFIG_IDENTIFIER)
DESIRED_CAPABILITIES = {
"deviceName": "Emulator 8.1", |
f12b6383f5e18c8e76760f535c630bf256ec0f8a | incunafein/module/page/extensions/prepared_date.py | incunafein/module/page/extensions/prepared_date.py | from django.db import models
def register(cls, admin_cls):
cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
def getter():
if not cls._prepared_date:
try:
return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
except IndexError:
return None
return cls._prepared_date
def setter(value):
cls._prepared_date = value
cls.prepared_date = property(getter, setter)
if admin_cls and admin_cls.fieldsets:
admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
| from django.db import models
def register(cls, admin_cls):
cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
def getter(obj):
if not obj._prepared_date:
try:
return obj.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
except IndexError:
return None
return obj._prepared_date
def setter(obj, value):
obj._prepared_date = value
cls.prepared_date = property(getter, setter)
if admin_cls and admin_cls.fieldsets:
admin_cls.fieldsets[2][1]['fields'].append('_prepared_date')
| Use the object instead of the class | Use the object instead of the class
| Python | bsd-2-clause | incuna/incuna-feincms,incuna/incuna-feincms,incuna/incuna-feincms | ---
+++
@@ -3,16 +3,16 @@
def register(cls, admin_cls):
cls.add_to_class('_prepared_date', models.TextField('Date of Preparation', blank=True, null=True))
- def getter():
- if not cls._prepared_date:
+ def getter(obj):
+ if not obj._prepared_date:
try:
- return cls.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
+ return obj.get_ancestors(ascending=True).filter(_prepared_date__isnull=False)[0]._prepared_date
except IndexError:
return None
- return cls._prepared_date
+ return obj._prepared_date
- def setter(value):
- cls._prepared_date = value
+ def setter(obj, value):
+ obj._prepared_date = value
cls.prepared_date = property(getter, setter)
|
f01f09cc9a6ba5afb6e4d59ad73dc5645693b672 | filepreviews/__init__.py | filepreviews/__init__.py | # -*- coding: utf-8 -*-
# flake8: noqa
"""
A Python library for FilePreview's API.
"""
__title__ = "filepreviews"
__version__ = "3.0.0"
__author__ = "José Padilla"
__license__ = "MIT"
__copyright__ = "Copyright 2015 Blimp LLC"
VERSION = __version__
API_URL = "https://api.filepreviews.io/v2"
from .api import FilePreviews
from .exceptions import (
APIError,
AuthenticationError,
FilePreviewsError,
InvalidRequestError,
)
__all__ = [
"FilePreviews",
"FilePreviewsError",
"APIError",
"InvalidRequestError",
"AuthenticationError",
]
| # -*- coding: utf-8 -*-
# flake8: noqa
"""
A Python library for FilePreview's API.
"""
__title__ = "filepreviews"
__version__ = "3.0.0rc1"
__author__ = "José Padilla"
__license__ = "MIT"
__copyright__ = "Copyright 2015 Blimp LLC"
VERSION = __version__
API_URL = "https://api.filepreviews.io/v2"
from .api import FilePreviews
from .exceptions import (
APIError,
AuthenticationError,
FilePreviewsError,
InvalidRequestError,
)
__all__ = [
"FilePreviews",
"FilePreviewsError",
"APIError",
"InvalidRequestError",
"AuthenticationError",
]
| Bump up version to 3.0.0rc1 | Bump up version to 3.0.0rc1
| Python | mit | GetBlimp/filepreviews-python | ---
+++
@@ -5,7 +5,7 @@
"""
__title__ = "filepreviews"
-__version__ = "3.0.0"
+__version__ = "3.0.0rc1"
__author__ = "José Padilla"
__license__ = "MIT"
__copyright__ = "Copyright 2015 Blimp LLC" |
5ac176fafd35bfa675e1718b74a8c6ef4dc74629 | skoleintra/pgWeekplans.py | skoleintra/pgWeekplans.py | #
# -*- encoding: utf-8 -*-
#
import re
import config
import surllib
import semail
import datetime
import urllib
URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME
URL_MAIN = URL_PREFIX + 'Ugeplaner.asp'
def docFindWeekplans(bs):
trs = bs.findAll('tr')
for line in trs:
if not line.has_key('class'):
continue
if not [c for c in line['class'].split() if c.startswith('linje')]:
continue
links = line.findAll('a')
assert(len(links) >= 1)
# find week date
title = links[0].text
# find url
url = links[0]['href']
url = URL_PREFIX + urllib.quote(url.encode('iso-8859-1'), safe=':/?=&%')
bs = surllib.skoleGetURL(url, True)
msg = semail.Message('weekplans', bs)
msg.setTitle(u'%s' % title)
msg.updatePersonDate()
msg.maybeSend()
def skoleWeekplans():
global bs
# surllib.skoleLogin()
config.log(u'Kigger efter nye ugeplaner')
# read the initial page
bs = surllib.skoleGetURL(URL_MAIN, True, True)
docFindWeekplans(bs)
if __name__ == '__main__':
# test
skoleWeekplans()
| #
# -*- encoding: utf-8 -*-
#
import re
import config
import surllib
import semail
import datetime
import urllib
URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME
URL_MAIN = URL_PREFIX + 'Ugeplaner.asp'
def docFindWeekplans(bs):
trs = bs.findAll('tr')
for line in trs:
if not line.has_key('class'):
continue
if not [c for c in line['class'].split() if c.startswith('linje')]:
continue
links = line.findAll('a')
assert(len(links) >= 1)
# find week date
title = links[0].text
# find url
url = links[0]['href']
url = url.encode('iso-8859-1')
url = URL_PREFIX + urllib.quote(url, safe=':/?=&%')
bs = surllib.skoleGetURL(url, True)
msg = semail.Message('weekplans', bs)
msg.setTitle(u'%s' % title)
msg.updatePersonDate()
msg.maybeSend()
def skoleWeekplans():
global bs
# surllib.skoleLogin()
config.log(u'Kigger efter nye ugeplaner')
# read the initial page
bs = surllib.skoleGetURL(URL_MAIN, True, True)
docFindWeekplans(bs)
if __name__ == '__main__':
# test
skoleWeekplans()
| Make code comply to PEP8 | Make code comply to PEP8
| Python | bsd-2-clause | bennyslbs/fskintra | ---
+++
@@ -11,6 +11,7 @@
URL_PREFIX = 'http://%s/Infoweb/Fi/' % config.HOSTNAME
URL_MAIN = URL_PREFIX + 'Ugeplaner.asp'
+
def docFindWeekplans(bs):
@@ -30,7 +31,8 @@
# find url
url = links[0]['href']
- url = URL_PREFIX + urllib.quote(url.encode('iso-8859-1'), safe=':/?=&%')
+ url = url.encode('iso-8859-1')
+ url = URL_PREFIX + urllib.quote(url, safe=':/?=&%')
bs = surllib.skoleGetURL(url, True)
|
e3a3e729eb60f5a7e134da5b58bb52d672e1d8b2 | sitenco/config/sphinx.py | sitenco/config/sphinx.py | """
Docs with Sphinx.
"""
import sys
import abc
import os.path
import subprocess
from . import vcs
from .. import DOCS_PATH
class Sphinx(vcs.VCS):
"""Abstract class for project folder tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch='master', url=None):
path = os.path.join(DOCS_PATH, path)
super(Sphinx, self).__init__(path, branch, url)
class Git(Sphinx, vcs.Git):
"""Git tool."""
def update(self):
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
[sys.executable, 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
cwd=self.path)
| """
Docs with Sphinx.
"""
import sys
import abc
import os.path
import subprocess
from . import vcs
from .. import DOCS_PATH
class Sphinx(vcs.VCS):
"""Abstract class for project folder tools."""
__metaclass__ = abc.ABCMeta
def __init__(self, path, branch='master', url=None):
path = os.path.join(DOCS_PATH, path)
super(Sphinx, self).__init__(path, branch, url)
class Git(Sphinx, vcs.Git):
"""Git tool."""
def update(self):
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
['python3', 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
cwd=self.path)
| Use python interpreter instead of sys.executable | Use python interpreter instead of sys.executable
| Python | bsd-3-clause | Kozea/sitenco | ---
+++
@@ -27,5 +27,5 @@
self._repository.fetch()
self._repository.reset('--hard', 'origin/' + self.branch)
subprocess.check_call(
- [sys.executable, 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
+ ['python3', 'setup.py', 'build_sphinx', '-b', 'dirhtml'],
cwd=self.path) |
3ed6e31cecab4166220a9fb20fe603b71946ab9c | every_election/apps/election_snooper/views.py | every_election/apps/election_snooper/views.py | from django.views.generic import TemplateView
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from election_snooper.models import SnoopedElection
from election_snooper.forms import ReviewElectionForm
class SnoopedElectionView(TemplateView):
template_name = "election_snooper/snooped_election_list.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
queryset = SnoopedElection.objects.all().order_by('date_seen')
if 'status' in self.request.GET:
queryset = queryset.filter(status=self.request.GET['status'])
if 'pk' in self.request.GET:
queryset = queryset.filter(pk=self.request.GET['pk'])
object_list = []
for item in queryset:
object_list.append(
ReviewElectionForm(instance=item, prefix=item.pk)
)
context['object_list'] = object_list
return context
def post(self, request, *args, **kwargs):
instance = SnoopedElection.objects.get(pk=request.POST.get('pk'))
form = ReviewElectionForm(
request.POST, instance=instance, prefix=instance.pk)
if form.is_valid():
form.save()
# TODO: if there's an error it's not processed yet
return HttpResponseRedirect(reverse('snooped_election_view'))
| from django.views.generic import TemplateView
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from election_snooper.models import SnoopedElection
from election_snooper.forms import ReviewElectionForm
class SnoopedElectionView(TemplateView):
template_name = "election_snooper/snooped_election_list.html"
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
queryset = SnoopedElection.objects.all().order_by('-date_seen', 'id')
if 'status' in self.request.GET:
queryset = queryset.filter(status=self.request.GET['status'])
if 'pk' in self.request.GET:
queryset = queryset.filter(pk=self.request.GET['pk'])
object_list = []
for item in queryset:
object_list.append(
ReviewElectionForm(instance=item, prefix=item.pk)
)
context['object_list'] = object_list
return context
def post(self, request, *args, **kwargs):
instance = SnoopedElection.objects.get(pk=request.POST.get('pk'))
form = ReviewElectionForm(
request.POST, instance=instance, prefix=instance.pk)
if form.is_valid():
form.save()
# TODO: if there's an error it's not processed yet
return HttpResponseRedirect(reverse('snooped_election_view'))
| Order radar view more sensibly | Order radar view more sensibly
1. Sort with by date_seen DESC so new stuff is at the top
and old stuff is at the bottom
2. Sort by id as secondary criteria so order is consistent
otherwise the order changes a bit every time we make an edit
| Python | bsd-3-clause | DemocracyClub/EveryElection,DemocracyClub/EveryElection,DemocracyClub/EveryElection | ---
+++
@@ -11,7 +11,7 @@
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
- queryset = SnoopedElection.objects.all().order_by('date_seen')
+ queryset = SnoopedElection.objects.all().order_by('-date_seen', 'id')
if 'status' in self.request.GET:
queryset = queryset.filter(status=self.request.GET['status']) |
73c904fffaa5e123b8e5dffcb93321defc684709 | tests/testapp/home/templatetags/pretty_tags.py | tests/testapp/home/templatetags/pretty_tags.py | import json
from bs4 import BeautifulSoup
from django import template
register = template.Library()
@register.filter
def pretty_json(data):
if isinstance(data, str):
data = json.loads(data)
return json.dumps(data, indent=4)
@register.filter
def pretty_html(data):
soup = BeautifulSoup(data, 'html5lib')
body = soup.find('body')
return body.prettify() # TODO: Strip the body tag.
| from __future__ import absolute_import, unicode_literals
import json
from bs4 import BeautifulSoup
from django import template
register = template.Library()
@register.filter
def pretty_json(data):
if isinstance(data, str):
data = json.loads(data)
return json.dumps(data, indent=4)
@register.filter
def pretty_html(data):
soup = BeautifulSoup(data, 'html5lib')
body = soup.find('body')
return body.prettify() # TODO: Strip the body tag.
| Add missing import path failing linting | Add missing import path failing linting
| Python | mit | springload/wagtaildraftail,gasman/wagtaildraftail,gasman/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,gasman/wagtaildraftail,springload/wagtaildraftail,springload/wagtaildraftail | ---
+++
@@ -1,3 +1,5 @@
+from __future__ import absolute_import, unicode_literals
+
import json
from bs4 import BeautifulSoup |
9cb56923ba57fec79a8efd4919315910f82f5da1 | angr/engines/soot/expressions/phi.py | angr/engines/soot/expressions/phi.py |
import logging
from .base import SimSootExpr
l = logging.getLogger('angr.engines.soot.expressions.phi')
class SimSootExpr_Phi(SimSootExpr):
def _execute(self):
local = [self._translate_value(v) for v, idx in self.expr.values if idx == self.state.scratch.source.block_idx][0]
value = self.state.memory.load(local, none_if_missing=True)
self.expr = value
|
import logging
from .base import SimSootExpr
l = logging.getLogger('angr.engines.soot.expressions.phi')
class SimSootExpr_Phi(SimSootExpr):
def _execute(self):
try:
local = [self._translate_value(v) for v, idx in self.expr.values if idx == self.state.scratch.source.block_idx][0]
value = self.state.memory.load(local, none_if_missing=True)
self.expr = value
except IndexError:
# TODO is there a better way to do this?
local_options = [self._translate_value(v) for v, idx in self.expr.values[::-1]]
for local in local_options:
value = self.state.memory.load(local, none_if_missing=True)
if value is not None:
self.expr = value
return
| Add heuristic when Phi expr fails | Add heuristic when Phi expr fails
| Python | bsd-2-clause | iamahuman/angr,schieb/angr,iamahuman/angr,angr/angr,iamahuman/angr,schieb/angr,angr/angr,angr/angr,schieb/angr | ---
+++
@@ -8,6 +8,16 @@
class SimSootExpr_Phi(SimSootExpr):
def _execute(self):
- local = [self._translate_value(v) for v, idx in self.expr.values if idx == self.state.scratch.source.block_idx][0]
- value = self.state.memory.load(local, none_if_missing=True)
- self.expr = value
+ try:
+ local = [self._translate_value(v) for v, idx in self.expr.values if idx == self.state.scratch.source.block_idx][0]
+ value = self.state.memory.load(local, none_if_missing=True)
+ self.expr = value
+ except IndexError:
+ # TODO is there a better way to do this?
+ local_options = [self._translate_value(v) for v, idx in self.expr.values[::-1]]
+ for local in local_options:
+ value = self.state.memory.load(local, none_if_missing=True)
+ if value is not None:
+ self.expr = value
+ return
+ |
cb69adc8727048ac9e1f7a1205a0f1b5ed269dad | Native/Scripts/finalize.py | Native/Scripts/finalize.py |
# This script just copies the generated .pyd file to the current directory.
import platform
from shutil import copyfile
from os.path import isfile
source_file = None
if platform.system() == "Windows":
possible_files = [
"Windows/Release/RSNative.pyd",
"Windows/x64/Release/RSNative.pyd",
]
target_location = "RSNative.pyd"
elif platform.system() == "Linux":
possible_files = [
"Linux/RSNative.so"
]
target_location = "RSNative.so"
for file in possible_files:
if isfile(file):
source_file = file
if source_file:
copyfile(source_file, target_file)
|
# This script just copies the generated .pyd file to the current directory.
import platform
from shutil import copyfile
from os.path import isfile
source_file = None
if platform.system() == "Windows":
possible_files = [
"Windows/Release/RSNative.pyd",
"Windows/x64/Release/RSNative.pyd",
]
target_location = "RSNative.pyd"
elif platform.system() == "Linux":
possible_files = [
"Linux/RSNative.so"
]
target_location = "RSNative.so"
for file in possible_files:
if isfile(file):
source_file = file
if source_file:
copyfile(source_file, target_location)
| Fix typo in build system | Fix typo in build system
| Python | mit | croxis/SpaceDrive,eswartz/RenderPipeline,eswartz/RenderPipeline,croxis/SpaceDrive,croxis/SpaceDrive,eswartz/RenderPipeline | ---
+++
@@ -14,7 +14,7 @@
"Windows/x64/Release/RSNative.pyd",
]
target_location = "RSNative.pyd"
-
+
elif platform.system() == "Linux":
possible_files = [
"Linux/RSNative.so"
@@ -26,5 +26,4 @@
source_file = file
if source_file:
- copyfile(source_file, target_file)
-
+ copyfile(source_file, target_location) |
33cf4e996d1ff230ae7e8a55e57f0bd5a046ea0f | dlstats/configuration.py | dlstats/configuration.py | import configobj
import validate
import os
def _get_filename():
"""Return the configuration file path."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
return os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
return '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
return ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname))
elif os.name == 'nt':
return ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
configuration_filename = _get_filename()
_configspec = """
[General]
logging_directory = string()
socket_directory = string()
[MongoDB]
host = ip_addr()
port = integer()
max_pool_size = integer()
socketTimeoutMS = integer()
connectTimeoutMS = integer()
waitQueueTimeout = integer()
waitQueueMultiple = integer()
auto_start_request = boolean()
use_greenlets = boolean()
[Fetchers]
[[Eurostat]]
url_table_of_contents = string()"""
configuration = configobj.ConfigObj(configuration_filename,
configspec=_configspec.split('\n'))
validator = validate.Validator()
configuration.validate(validator, copy=True)
configuration = configuration.dict()
| import configobj
import validate
import os
def _get_filename():
"""Return the configuration file path."""
appname = 'dlstats'
if os.name == 'posix':
if os.path.isfile(os.environ["HOME"]+'/.'+appname):
return os.environ["HOME"]+'/.'+appname
elif os.path.isfile('/etc/'+appname):
return '/etc/'+appname
else:
raise FileNotFoundError('No configuration file found.')
elif os.name == 'mac':
return ("%s/Library/Application Support/%s" % (os.environ["HOME"], appname))
elif os.name == 'nt':
return ("%s\Application Data\%s" % (os.environ["HOMEPATH"], appname))
else:
raise UnsupportedOSError(os.name)
configuration_filename = _get_filename()
_configspec = """
[General]
logging_directory = string()
socket_directory = string()
[MongoDB]
host = ip_addr()
port = integer()
max_pool_size = integer()
socketTimeoutMS = integer()
connectTimeoutMS = integer()
waitQueueTimeout = integer()
waitQueueMultiple = integer()
auto_start_request = boolean()
use_greenlets = boolean()
[Fetchers]
[[Eurostat]]
url_table_of_contents = string()"""
configuration = configobj.ConfigObj(configuration_filename,
configspec=_configspec.split('\n'))
print(configuration)
validator = validate.Validator()
configuration.validate(validator)
configuration = configuration.dict()
| Remove copy from the validator | Remove copy from the validator
We are not providing default values to the configuration object. Those
are distributed with the package; the expected behavior is to throw an
exception if the options are not explicitly passed.
| Python | agpl-3.0 | MichelJuillard/dlstats,Widukind/dlstats,mmalter/dlstats,Widukind/dlstats,MichelJuillard/dlstats,mmalter/dlstats,mmalter/dlstats,MichelJuillard/dlstats | ---
+++
@@ -40,7 +40,8 @@
url_table_of_contents = string()"""
configuration = configobj.ConfigObj(configuration_filename,
configspec=_configspec.split('\n'))
+print(configuration)
validator = validate.Validator()
-configuration.validate(validator, copy=True)
+configuration.validate(validator)
configuration = configuration.dict()
|
f0c53900a62159249240dedc486678d451932cb1 | opps/articles/tests/models.py | opps/articles/tests/models.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from django.contrib.sites.models import Site
from django.contrib.auth import get_user_model
from django.core.files import File
from opps.articles.models import Post
from opps.channels.models import Channel
from opps.images.models import Image
class PostModelTest(TestCase):
def setUp(self):
User = get_user_model()
self.user = User.objects.create(username=u'test', password='test')
self.site = Site.objects.filter(name=u'example.com').get()
self.channel = Channel.objects.create(name=u'Home', slug=u'home',
description=u'home page',
site=self.site, user=self.user)
image = File(open("opps/__init__.py"), "test_file.png")
self.image = Image.objects.create(site=self.site, title='test',
image=image, user=self.user)
self.post = Post.objects.create(
title=u'Basic test',
slug=u'basic-test',
short_title=u'Write basict test for Article type Post',
content=u'This field is context',
main_image=self.image,
channel=self.channel,
user=self.user
)
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
self.assertEqual(post[0], self.post)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
from opps.articles.models import Post
class PostModelTest(TestCase):
fixtures = ['tests/initial_data.json']
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
self.assertEqual(len(post), 1)
self.assertEqual(post[0].slug, u'test-post-application')
self.assertEqual(post[0].title, u'test post application')
self.assertTrue(post[0].short_url)
| Fix articles test, used test initial_data | Fix articles test, used test initial_data
| Python | mit | williamroot/opps,jeanmask/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,opps/opps,opps/opps,opps/opps,YACOWS/opps | ---
+++
@@ -1,41 +1,19 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.test import TestCase
-from django.contrib.sites.models import Site
-from django.contrib.auth import get_user_model
-from django.core.files import File
from opps.articles.models import Post
-from opps.channels.models import Channel
-from opps.images.models import Image
class PostModelTest(TestCase):
- def setUp(self):
- User = get_user_model()
- self.user = User.objects.create(username=u'test', password='test')
- self.site = Site.objects.filter(name=u'example.com').get()
- self.channel = Channel.objects.create(name=u'Home', slug=u'home',
- description=u'home page',
- site=self.site, user=self.user)
-
- image = File(open("opps/__init__.py"), "test_file.png")
- self.image = Image.objects.create(site=self.site, title='test',
- image=image, user=self.user)
-
- self.post = Post.objects.create(
- title=u'Basic test',
- slug=u'basic-test',
- short_title=u'Write basict test for Article type Post',
- content=u'This field is context',
- main_image=self.image,
- channel=self.channel,
- user=self.user
- )
+ fixtures = ['tests/initial_data.json']
def test_basic_post_exist(self):
post = Post.objects.all()
self.assertTrue(post)
- self.assertEqual(post[0], self.post)
+ self.assertEqual(len(post), 1)
+ self.assertEqual(post[0].slug, u'test-post-application')
+ self.assertEqual(post[0].title, u'test post application')
+ self.assertTrue(post[0].short_url) |
daf6468079e7ff3e00550db0f3a16bc109184027 | osgtest/tests/test_49_jobs.py | osgtest/tests/test_49_jobs.py | #pylint: disable=C0301
#pylint: disable=C0111
#pylint: disable=R0201
#pylint: disable=R0904
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_installed('osg-configure')
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
| #pylint: disable=C0301
#pylint: disable=C0111
#pylint: disable=R0201
#pylint: disable=R0904
import osgtest.library.core as core
import osgtest.library.files as files
import osgtest.library.osgunittest as osgunittest
class TestCleanupJobs(osgunittest.OSGTestCase):
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs')
files.restore(core.config['osg.local-job-environment'], owner='pbs')
| Drop job env backup cleanup dependence on osg-configure | Drop job env backup cleanup dependence on osg-configure
We already dropped the creation of the job env files in 840ea8
| Python | apache-2.0 | efajardo/osg-test,efajardo/osg-test | ---
+++
@@ -11,7 +11,6 @@
"""Clean any configuration we touched for running jobs"""
def test_01_restore_job_env(self):
- core.skip_ok_unless_installed('osg-configure')
core.skip_ok_unless_one_installed(['htcondor-ce', 'globus-gatekeeper', 'condor'])
files.restore(core.config['osg.job-environment'], owner='pbs') |
97e60ffa741bafbd34bcee18d0dce9f323b0132a | project/settings/prod.py | project/settings/prod.py | # Local
from .base import *
# Heroku Settings
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
ALLOWED_HOSTS = [
'.barberscore.com',
'.herokuapp.com',
]
DATABASES['default']['TEST'] = {
'NAME': DATABASES['default']['NAME'],
}
# Email
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = get_env_variable("SENDGRID_USERNAME")
EMAIL_HOST_PASSWORD = get_env_variable("SENDGRID_PASSWORD")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_SUBJECT_PREFIX = "[Barberscore] "
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
}
| # Local
from .base import *
# Heroku Settings
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
ALLOWED_HOSTS = [
'testserver',
'.barberscore.com',
'.herokuapp.com',
]
DATABASES['default']['TEST'] = {
'NAME': DATABASES['default']['NAME'],
}
# Email
EMAIL_HOST = 'smtp.sendgrid.net'
EMAIL_HOST_USER = get_env_variable("SENDGRID_USERNAME")
EMAIL_HOST_PASSWORD = get_env_variable("SENDGRID_PASSWORD")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
EMAIL_SUBJECT_PREFIX = "[Barberscore] "
# Logging
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
}
| TEst adding test server to allowed hosts | TEst adding test server to allowed hosts
| Python | bsd-2-clause | dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore,barberscore/barberscore-api,dbinetti/barberscore-django | ---
+++
@@ -6,6 +6,7 @@
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
SECURE_SSL_REDIRECT = True
ALLOWED_HOSTS = [
+ 'testserver',
'.barberscore.com',
'.herokuapp.com',
] |
23d3bd38fc9ee94a4f9a9e829473007354a534ed | auditlog/__manifest__.py | auditlog/__manifest__.py | # -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
'pre_init_hook': 'pre_init_hook',
}
| # -*- coding: utf-8 -*-
# © 2015 ABF OSIELL <http://osiell.com>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
{
'name': "Audit Log",
'version': "9.0.1.0.0",
'author': "ABF OSIELL,Odoo Community Association (OCA)",
'license': "AGPL-3",
'website': "http://www.osiell.com",
'category': "Tools",
'depends': [
'base',
],
'data': [
'security/ir.model.access.csv',
'data/ir_cron.xml',
'views/auditlog_view.xml',
'views/http_session_view.xml',
'views/http_request_view.xml',
],
'images': [],
'application': True,
'installable': True,
}
| Remove pre_init_hook reference from openerp, no pre_init hook exists any more | auditlog: Remove pre_init_hook reference from openerp, no pre_init hook exists any more
| Python | agpl-3.0 | YannickB/server-tools,YannickB/server-tools,OCA/server-tools,OCA/server-tools,YannickB/server-tools,OCA/server-tools | ---
+++
@@ -22,5 +22,4 @@
'images': [],
'application': True,
'installable': True,
- 'pre_init_hook': 'pre_init_hook',
} |
1ba0d9a4f445b68d19105f405be223e44eb524af | webant/webserver_utils.py | webant/webserver_utils.py | '''
This module provides some function to make running a webserver a little easier
'''
def gevent_run(app):
from gevent.wsgi import WSGIServer
import gevent.monkey
from werkzeug.debug import DebuggedApplication
gevent.monkey.patch_socket()
run_app = app
if app.config['DEBUG']:
run_app = DebuggedApplication(app)
def run_server():
import logging
port = int(app.config.get('PORT', 5000))
address = app.config.get('ADDRESS', '')
logging.getLogger('webant').info('Listening on http://{}:{}/'.format(address or '0.0.0.0', port))
http_server = WSGIServer((address, port), run_app)
http_server.serve_forever()
if app.config['DEBUG']:
from werkzeug._reloader import run_with_reloader
run_with_reloader(run_server)
else:
run_server()
| '''
This module provides some function to make running a webserver a little easier
'''
def gevent_run(app):
from gevent.wsgi import WSGIServer
import gevent.monkey
from werkzeug.debug import DebuggedApplication
gevent.monkey.patch_socket()
run_app = app
if app.config['DEBUG']:
run_app = DebuggedApplication(app)
def run_server():
from gevent import version_info
logger = app._logger
port = int(app.config.get('PORT', 5000))
address = app.config.get('ADDRESS', '')
logger.info('Listening on http://{}:{}/'.format(address or '0.0.0.0', port))
server_params = dict()
#starting from gevent version 1.1b1 we can pass custom logger to gevent
if version_info[:2] >= (1,1):
server_params['log'] = logger
http_server = WSGIServer((address, port), run_app, **server_params)
http_server.serve_forever()
if app.config['DEBUG']:
from werkzeug._reloader import run_with_reloader
run_with_reloader(run_server)
else:
run_server()
| Use custom logger on gevent>=1.1 | Use custom logger on gevent>=1.1
| Python | agpl-3.0 | leophys/libreant,insomnia-lab/libreant,leophys/libreant,insomnia-lab/libreant,insomnia-lab/libreant,insomnia-lab/libreant,leophys/libreant,leophys/libreant | ---
+++
@@ -13,11 +13,17 @@
run_app = DebuggedApplication(app)
def run_server():
- import logging
+ from gevent import version_info
+
+ logger = app._logger
port = int(app.config.get('PORT', 5000))
address = app.config.get('ADDRESS', '')
- logging.getLogger('webant').info('Listening on http://{}:{}/'.format(address or '0.0.0.0', port))
- http_server = WSGIServer((address, port), run_app)
+ logger.info('Listening on http://{}:{}/'.format(address or '0.0.0.0', port))
+ server_params = dict()
+ #starting from gevent version 1.1b1 we can pass custom logger to gevent
+ if version_info[:2] >= (1,1):
+ server_params['log'] = logger
+ http_server = WSGIServer((address, port), run_app, **server_params)
http_server.serve_forever()
if app.config['DEBUG']: |
cc741215fa5ff6a2a64d7bee3ff6108a4d85a16f | bootstrap3/components.py | bootstrap3/components.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms.widgets import flatatt
from django.utils.safestring import mark_safe
from bootstrap3.utils import render_tag
from .text import text_value
def render_icon(icon, title=''):
"""
Render a Bootstrap glyphicon icon
"""
attrs = {
'class': 'glyphicon glyphicon-{icon}'.format(icon=icon),
}
if title:
attrs['title'] = title
return render_tag('span', attrs=attrs)
def render_alert(content, alert_type=None, dismissable=True):
"""
Render a Bootstrap alert
"""
button = ''
if not alert_type:
alert_type = 'info'
css_classes = ['alert', 'alert-' + text_value(alert_type)]
if dismissable:
css_classes.append('alert-dismissable')
button = '<button type="button" class="close" ' + \
'data-dismiss="alert" aria-hidden="true">×</button>'
button_placeholder = '__BUTTON__'
return mark_safe(render_tag(
'div',
attrs={'class': ' '.join(css_classes)},
content=button_placeholder + text_value(content),
).replace(button_placeholder, button))
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.forms.widgets import flatatt
from django.utils.safestring import mark_safe
from bootstrap3.utils import render_tag
from .text import text_value
def render_icon(icon, **kwargs):
"""
Render a Bootstrap glyphicon icon
"""
classes = ['glyphicon glyphicon-{icon}'.format(icon=icon)]
if kwargs.get('add_class'):
classes.append(kwargs.get('add_class'))
attrs = {
'class': ' '.join(classes),
}
if kwargs.get('title'):
attrs['title'] = kwargs.get('title')
return render_tag('span', attrs=attrs)
def render_alert(content, alert_type=None, dismissable=True):
"""
Render a Bootstrap alert
"""
button = ''
if not alert_type:
alert_type = 'info'
css_classes = ['alert', 'alert-' + text_value(alert_type)]
if dismissable:
css_classes.append('alert-dismissable')
button = '<button type="button" class="close" ' + \
'data-dismiss="alert" aria-hidden="true">×</button>'
button_placeholder = '__BUTTON__'
return mark_safe(render_tag(
'div',
attrs={'class': ' '.join(css_classes)},
content=button_placeholder + text_value(content),
).replace(button_placeholder, button))
| Allow passing extra classes into render_icon, for example to set text-success | Allow passing extra classes into render_icon, for example to set text-success
| Python | bsd-3-clause | dyve/django-bootstrap3,zostera/django-bootstrap4,dyve/django-bootstrap3,zostera/django-bootstrap4 | ---
+++
@@ -8,15 +8,19 @@
from .text import text_value
-def render_icon(icon, title=''):
+def render_icon(icon, **kwargs):
"""
Render a Bootstrap glyphicon icon
"""
+ classes = ['glyphicon glyphicon-{icon}'.format(icon=icon)]
+ if kwargs.get('add_class'):
+ classes.append(kwargs.get('add_class'))
attrs = {
- 'class': 'glyphicon glyphicon-{icon}'.format(icon=icon),
+ 'class': ' '.join(classes),
}
- if title:
- attrs['title'] = title
+ if kwargs.get('title'):
+ attrs['title'] = kwargs.get('title')
+
return render_tag('span', attrs=attrs)
|
ebc203958db534cdfd29dd53acae791e632cc30b | masters/master.chromium.git/master_source_cfg.py | masters/master.chromium.git/master_source_cfg.py | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='https://chromium.googlesource.com/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%h')
c['change_source'].append(poller)
| # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='https://chromium.googlesource.com/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%s')
c['change_source'].append(poller)
| Fix format string for revlinktmpl. | Fix format string for revlinktmpl.
TBR=iannucci@chromium.org
Review URL: https://codereview.chromium.org/12181028
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@180592 0039d316-1c4b-4281-b951-d872f2087c98
| Python | bsd-3-clause | eunchong/build,eunchong/build,eunchong/build,eunchong/build | ---
+++
@@ -9,5 +9,5 @@
repourl='https://chromium.googlesource.com/chromium/src.git',
branch='master',
pollinterval=10,
- revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%h')
+ revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%s')
c['change_source'].append(poller) |
a600d75133f01aa9cc30767634f3d77379e6ba56 | keyring/py27compat.py | keyring/py27compat.py | """
Compatibility support for Python 2.7. Remove when Python 2.7 support is
no longer required.
"""
try:
import configparser
except ImportError:
import ConfigParser as configparser
try:
input = raw_input
except NameError:
input = input
try:
text_type = unicode
except NameError:
text_type = str
try:
import cPickle as pickle
except ImportError:
import pickle
try:
from itertools import ifilter as filter
except ImportError:
filter = filter
# Taken from six.py
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
for slots_var in orig_vars.get('__slots__', ()):
orig_vars.pop(slots_var)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
try:
import builtins
except ImportError:
import __builtin__ as builtins
| """
Compatibility support for Python 2.7. Remove when Python 2.7 support is
no longer required.
"""
try:
import configparser
except ImportError:
import ConfigParser as configparser
try:
input = raw_input
except NameError:
input = input
try:
text_type = unicode
except NameError:
text_type = str
string_types = tuple(set(str, text_type))
try:
import cPickle as pickle
except ImportError:
import pickle
try:
from itertools import ifilter as filter
except ImportError:
filter = filter
# Taken from six.py
def add_metaclass(metaclass):
"""Class decorator for creating a class with a metaclass."""
def wrapper(cls):
orig_vars = cls.__dict__.copy()
orig_vars.pop('__dict__', None)
orig_vars.pop('__weakref__', None)
for slots_var in orig_vars.get('__slots__', ()):
orig_vars.pop(slots_var)
return metaclass(cls.__name__, cls.__bases__, orig_vars)
return wrapper
try:
import builtins
except ImportError:
import __builtin__ as builtins
| Add 'string_types' as found in six. | Add 'string_types' as found in six.
| Python | mit | jaraco/keyring | ---
+++
@@ -16,6 +16,8 @@
text_type = unicode
except NameError:
text_type = str
+
+string_types = tuple(set(str, text_type))
try:
import cPickle as pickle |
3386a32d0bae3433fad9f7c4960bdcb6a14a3835 | kobo/apps/__init__.py | kobo/apps/__init__.py | # coding: utf-8
import kombu.exceptions
from django.apps import AppConfig
from django.core.checks import register, Tags
from kpi.utils.two_database_configuration_checker import \
TwoDatabaseConfigurationChecker
class KpiConfig(AppConfig):
name = 'kpi'
def ready(self, *args, **kwargs):
# Once it's okay to read from the database, apply the user-desired
# autoscaling configuration for Celery workers
from kobo.celery import update_concurrency_from_constance
try:
update_concurrency_from_constance.delay()
except kombu.exceptions.OperationalError as e:
# It's normal for Django to start without access to a message
# broker, e.g. while running `./manage.py collectstatic`
# during a Docker image build
pass
return super().ready(*args, **kwargs)
register(TwoDatabaseConfigurationChecker().as_check(), Tags.database)
| # coding: utf-8
import kombu.exceptions
from django.apps import AppConfig
from django.core.checks import register, Tags
from kpi.utils.two_database_configuration_checker import \
TwoDatabaseConfigurationChecker
class KpiConfig(AppConfig):
name = 'kpi'
def ready(self, *args, **kwargs):
# Once it's okay to read from the database, apply the user-desired
# autoscaling configuration for Celery workers
from kobo.celery import update_concurrency_from_constance
try:
# Push this onto the task queue with `delay()` instead of calling
# it directly because a direct call in the absence of any Celery
# workers hangs indefinitely
update_concurrency_from_constance.delay()
except kombu.exceptions.OperationalError as e:
# It's normal for Django to start without access to a message
# broker, e.g. while running `./manage.py collectstatic`
# during a Docker image build
pass
return super().ready(*args, **kwargs)
register(TwoDatabaseConfigurationChecker().as_check(), Tags.database)
| Add explanatory comment for odd use of `delay()` | Add explanatory comment for odd use of `delay()`
| Python | agpl-3.0 | kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi | ---
+++
@@ -15,6 +15,9 @@
# autoscaling configuration for Celery workers
from kobo.celery import update_concurrency_from_constance
try:
+ # Push this onto the task queue with `delay()` instead of calling
+ # it directly because a direct call in the absence of any Celery
+ # workers hangs indefinitely
update_concurrency_from_constance.delay()
except kombu.exceptions.OperationalError as e:
# It's normal for Django to start without access to a message |
fb33fa9fef21bbe1e8d2d82dd986b69f8c3bdb64 | jp-holidays.py | jp-holidays.py | import requests
u = 'http://www.google.com/calendar/feeds/ja.japanese%23holiday@group.v.calendar.google.com/public/full?alt=json&max-results=100&futureevents=true'
data = requests.get(u).json()
print data.get('feed').get('title').get('$t')
for item in data.get('feed').get('entry'):
# gd$when
# 'title']['$t']
title = item['title']['$t']
day = item['gd$when'][0]['startTime']
print "{name}\t{day}".format(name = title.encode('utf8'), day=day.encode('utf8'))
| import requests
import sys
u = 'http://www.google.com/calendar/feeds/ja.japanese%23holiday@group.v.calendar.google.com/public/full'
params = {
'alt': 'json',
'max-results': 100,
'futureevents': 'true'
}
res = requests.get(u, params=params)
res.raise_for_status()
data = res.json()
print data.get('feed').get('title').get('$t')
for item in data.get('feed').get('entry'):
# ['title']['$t']
title = item['title']['$t']
# gd$when
day = item['gd$when'][0]['startTime']
print "{name}\t{day}".format(name = title.encode('utf8'), day=day.encode('utf8'))
| Use dict for uri params. | Use dict for uri params.
| Python | mit | shoma/python.tools | ---
+++
@@ -1,13 +1,22 @@
import requests
+import sys
-u = 'http://www.google.com/calendar/feeds/ja.japanese%23holiday@group.v.calendar.google.com/public/full?alt=json&max-results=100&futureevents=true'
+u = 'http://www.google.com/calendar/feeds/ja.japanese%23holiday@group.v.calendar.google.com/public/full'
+params = {
+ 'alt': 'json',
+ 'max-results': 100,
+ 'futureevents': 'true'
+}
-data = requests.get(u).json()
+res = requests.get(u, params=params)
+res.raise_for_status()
+
+data = res.json()
print data.get('feed').get('title').get('$t')
for item in data.get('feed').get('entry'):
+ # ['title']['$t']
+ title = item['title']['$t']
# gd$when
- # 'title']['$t']
- title = item['title']['$t']
day = item['gd$when'][0]['startTime']
print "{name}\t{day}".format(name = title.encode('utf8'), day=day.encode('utf8')) |
a8b2f1ad738709fb49c5e3f9175822cc1bafa11d | dataset/__init__.py | dataset/__init__.py | import os
# shut up useless SA warning:
import warnings
warnings.filterwarnings(
'ignore', 'Unicode type received non-unicode bind param value.')
from dataset.persistence.util import sqlite_datetime_fix
from dataset.persistence.database import Database
from dataset.persistence.table import Table
from dataset.freeze.app import freeze
__all__ = ['Database', 'Table', 'freeze', 'connect']
def connect(url=None, schema=None, reflectMetadata=True, engine_kwargs=None):
"""
Opens a new connection to a database. *url* can be any valid `SQLAlchemy engine URL`_.
If *url* is not defined it will try to use *DATABASE_URL* from environment variable.
Returns an instance of :py:class:`Database <dataset.Database>`. Set *reflectMetadata* to False if you
don't want the entire database schema to be pre-loaded. This significantly speeds up
connecting to large databases with lots of tables.
::
db = dataset.connect('sqlite:///factbook.db')
.. _SQLAlchemy Engine URL: http://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine
"""
if url is None:
url = os.environ.get('DATABASE_URL', url)
if url.startswith("sqlite://"):
sqlite_datetime_fix()
return Database(url, schema=schema, reflectMetadata=reflectMetadata,
engine_kwargs=engine_kwargs)
| import os
# shut up useless SA warning:
import warnings
warnings.filterwarnings(
'ignore', 'Unicode type received non-unicode bind param value.')
from dataset.persistence.util import sqlite_datetime_fix
from dataset.persistence.database import Database
from dataset.persistence.table import Table
from dataset.freeze.app import freeze
__all__ = ['Database', 'Table', 'freeze', 'connect']
def connect(url=None, schema=None, reflectMetadata=True, engine_kwargs=None):
"""
Opens a new connection to a database. *url* can be any valid `SQLAlchemy engine URL`_.
If *url* is not defined it will try to use *DATABASE_URL* from environment variable.
Returns an instance of :py:class:`Database <dataset.Database>`. Set *reflectMetadata* to False if you
don't want the entire database schema to be pre-loaded. This significantly speeds up
connecting to large databases with lots of tables. Additionally, *engine_kwargs* will be directly passed to
SQLAlchemy, e.g. set *engine_kwargs={'pool_recycle': 3600}* will avoid `DB connection timeout`_.
::
db = dataset.connect('sqlite:///factbook.db')
.. _SQLAlchemy Engine URL: http://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine
.. _DB connection timeout: http://docs.sqlalchemy.org/en/latest/core/pooling.html#setting-pool-recycle
"""
if url is None:
url = os.environ.get('DATABASE_URL', url)
if url.startswith("sqlite://"):
sqlite_datetime_fix()
return Database(url, schema=schema, reflectMetadata=reflectMetadata,
engine_kwargs=engine_kwargs)
| Add more instructions for engine_kwargs, to avoid DB connection timeout issues | Add more instructions for engine_kwargs, to avoid DB connection timeout issues
| Python | mit | askebos/dataset,twds/dataset,stefanw/dataset,vguzmanp/dataset,reubano/dataset,pudo/dataset,saimn/dataset | ---
+++
@@ -18,12 +18,14 @@
If *url* is not defined it will try to use *DATABASE_URL* from environment variable.
Returns an instance of :py:class:`Database <dataset.Database>`. Set *reflectMetadata* to False if you
don't want the entire database schema to be pre-loaded. This significantly speeds up
- connecting to large databases with lots of tables.
+ connecting to large databases with lots of tables. Additionally, *engine_kwargs* will be directly passed to
+ SQLAlchemy, e.g. set *engine_kwargs={'pool_recycle': 3600}* will avoid `DB connection timeout`_.
::
db = dataset.connect('sqlite:///factbook.db')
.. _SQLAlchemy Engine URL: http://docs.sqlalchemy.org/en/latest/core/engines.html#sqlalchemy.create_engine
+ .. _DB connection timeout: http://docs.sqlalchemy.org/en/latest/core/pooling.html#setting-pool-recycle
"""
if url is None:
url = os.environ.get('DATABASE_URL', url) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.