commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
9249f1f2fce010cb0378080b5b7fef55235eefea
|
rnacentral/portal/migrations/0010_add_precomputed_rna_type.py
|
rnacentral/portal/migrations/0010_add_precomputed_rna_type.py
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0007_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
]
|
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('portal', '0007_add_precomputed_rna_table'),
]
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=40))
]
|
Drop size of rna_type column to 40
|
Drop size of rna_type column to 40
Largest rna_type is 32 so 40 should give us plenty of buffer space as
per Anton's suggestion.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
|
---
+++
@@ -12,5 +12,5 @@
operations = [
# rna_type is a / seperated field that represents the set of rna_types
# for a given sequence.
- migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=250))
+ migrations.AddField("RnaPrecomputed", "rna_type", models.CharField(max_length=40))
]
|
80857a9f30b3e6773a658bf8ce93809c0881f80a
|
plugins/liquid_tags/liquid_tags.py
|
plugins/liquid_tags/liquid_tags.py
|
from pelican import signals
from .mdx_liquid_tags import LiquidTags, LT_CONFIG
def addLiquidTags(gen):
if not gen.settings.get('MD_EXTENSIONS'):
from pelican.settings import DEFAULT_CONFIG
gen.settings['MD_EXTENSIONS'] = DEFAULT_CONFIG['MD_EXTENSIONS']
if LiquidTags not in gen.settings['MD_EXTENSIONS']:
configs = dict()
for key,value in LT_CONFIG.items():
configs[key]=value
for key,value in gen.settings.items():
if key in LT_CONFIG:
configs[key]=value
gen.settings['MD_EXTENSIONS'].append(LiquidTags(configs))
def register():
signals.initialized.connect(addLiquidTags)
|
from pelican import signals
from .mdx_liquid_tags import LiquidTags, LT_CONFIG
def addLiquidTags(gen):
if not gen.settings.get('MARKDOWN'):
from pelican.settings import DEFAULT_CONFIG
gen.settings['MARKDOWN'] = DEFAULT_CONFIG['MARKDOWN']
if LiquidTags not in gen.settings['MARKDOWN']:
configs = dict()
for key,value in LT_CONFIG.items():
configs[key]=value
for key,value in gen.settings.items():
if key in LT_CONFIG:
configs[key]=value
gen.settings['MARKDOWN'].setdefault(
'extensions', []
).append(
LiquidTags(configs)
)
def register():
signals.initialized.connect(addLiquidTags)
|
Update to new markdown settings
|
Update to new markdown settings
|
Python
|
apache-2.0
|
danielfrg/danielfrg.github.io-source,danielfrg/danielfrg.github.io-source,danielfrg/danielfrg.github.io-source
|
---
+++
@@ -3,18 +3,23 @@
def addLiquidTags(gen):
- if not gen.settings.get('MD_EXTENSIONS'):
+ if not gen.settings.get('MARKDOWN'):
from pelican.settings import DEFAULT_CONFIG
- gen.settings['MD_EXTENSIONS'] = DEFAULT_CONFIG['MD_EXTENSIONS']
+ gen.settings['MARKDOWN'] = DEFAULT_CONFIG['MARKDOWN']
- if LiquidTags not in gen.settings['MD_EXTENSIONS']:
+ if LiquidTags not in gen.settings['MARKDOWN']:
configs = dict()
for key,value in LT_CONFIG.items():
configs[key]=value
for key,value in gen.settings.items():
if key in LT_CONFIG:
configs[key]=value
- gen.settings['MD_EXTENSIONS'].append(LiquidTags(configs))
+ gen.settings['MARKDOWN'].setdefault(
+ 'extensions', []
+ ).append(
+ LiquidTags(configs)
+ )
+
def register():
|
43434cc8efa52b56a64d52076b3760131456c34c
|
.bin/broadcast_any_song.py
|
.bin/broadcast_any_song.py
|
#!/usr/bin/env python2
################################################################################
# broadcast_any_song.py
#
# Uses the Exfm REST API to broadcast a song, (basically scours Tumblr for an
# audio file matching a query then sends it to PiFM.)
#
# Maintained By: Ryan Jacobs <ryan.mjacobs@gmail.com>
#
# May 18, 2014 -> Creation date.
################################################################################
# Global Variables
NC_HOST="gamma"
NC_PORT=1234
CHANNEL=94.3
import os # to execute shell commands
import sys # arguments
import json # json parsing
import urllib2 # url parsing and downloading
if not len(sys.argv) > 1:
print('Usage: ' + sys.argv[0] + ' <search term>')
exit(1)
json_url = urllib2.urlopen("http://ex.fm/api/v3/song/search/%s"% "+".join(sys.argv[1:]))
parsed_json = json.loads(json_url.read())
song_url = parsed_json["songs"][0]["url"]
os.system("wget -O - " + song_url + " | nc " + str(NC_HOST) + " " + str(NC_PORT))
|
#!/usr/bin/env python2
################################################################################
# broadcast_any_song.py
#
# Uses the Exfm REST API to broadcast a song, (basically scours Tumblr for an
# audio file matching a query then sends it to PiFM.)
#
# Maintained By: Ryan Jacobs <ryan.mjacobs@gmail.com>
#
# May 18, 2014 -> Creation date.
################################################################################
# Global Variables
NC_HOST="gamma"
NC_PORT=1234
import os # to execute shell commands
import sys # arguments
import json # json parsing
import urllib2 # url parsing and downloading
if not len(sys.argv) > 1:
print('Usage: ' + sys.argv[0] + ' <search term>')
exit(1)
json_url = urllib2.urlopen("http://ex.fm/api/v3/song/search/%s"% "+".join(sys.argv[1:]))
parsed_json = json.loads(json_url.read())
song_url = parsed_json["songs"][0]["url"]
os.system("wget -O - " + song_url + " | nc " + str(NC_HOST) + " " + str(NC_PORT))
|
Remove CHANNEL. Why is it even there?
|
Remove CHANNEL. Why is it even there?
|
Python
|
mit
|
ryanmjacobs/ryans_dotfiles,ryanmjacobs/ryans_dotfiles
|
---
+++
@@ -13,7 +13,6 @@
# Global Variables
NC_HOST="gamma"
NC_PORT=1234
-CHANNEL=94.3
import os # to execute shell commands
import sys # arguments
|
388c938c0604bbf432921ad46be8325b1e74fa4a
|
direct/src/showbase/TkGlobal.py
|
direct/src/showbase/TkGlobal.py
|
""" This module is now vestigial. """
import sys, Pmw
# This is required by the ihooks.py module used by Squeeze (used by
# pandaSqueezer.py) so that Pmw initializes properly
if '_Pmw' in sys.modules:
sys.modules['_Pmw'].__name__ = '_Pmw'
def spawnTkLoop():
base.spawnTkLoop()
|
""" This module is now vestigial. """
from Tkinter import *
import sys, Pmw
# This is required by the ihooks.py module used by Squeeze (used by
# pandaSqueezer.py) so that Pmw initializes properly
if '_Pmw' in sys.modules:
sys.modules['_Pmw'].__name__ = '_Pmw'
def spawnTkLoop():
base.spawnTkLoop()
|
Add import for backward compatibility
|
Add import for backward compatibility
|
Python
|
bsd-3-clause
|
ee08b397/panda3d,hj3938/panda3d,mgracer48/panda3d,chandler14362/panda3d,grimfang/panda3d,mgracer48/panda3d,chandler14362/panda3d,brakhane/panda3d,chandler14362/panda3d,cc272309126/panda3d,grimfang/panda3d,matthiascy/panda3d,mgracer48/panda3d,chandler14362/panda3d,Wilee999/panda3d,ee08b397/panda3d,jjkoletar/panda3d,jjkoletar/panda3d,hj3938/panda3d,Wilee999/panda3d,ee08b397/panda3d,cc272309126/panda3d,grimfang/panda3d,jjkoletar/panda3d,ee08b397/panda3d,hj3938/panda3d,grimfang/panda3d,Wilee999/panda3d,Wilee999/panda3d,chandler14362/panda3d,tobspr/panda3d,cc272309126/panda3d,mgracer48/panda3d,Wilee999/panda3d,brakhane/panda3d,grimfang/panda3d,matthiascy/panda3d,grimfang/panda3d,brakhane/panda3d,tobspr/panda3d,brakhane/panda3d,tobspr/panda3d,jjkoletar/panda3d,chandler14362/panda3d,grimfang/panda3d,mgracer48/panda3d,brakhane/panda3d,jjkoletar/panda3d,cc272309126/panda3d,brakhane/panda3d,hj3938/panda3d,grimfang/panda3d,hj3938/panda3d,mgracer48/panda3d,chandler14362/panda3d,matthiascy/panda3d,mgracer48/panda3d,tobspr/panda3d,tobspr/panda3d,ee08b397/panda3d,hj3938/panda3d,grimfang/panda3d,chandler14362/panda3d,Wilee999/panda3d,grimfang/panda3d,brakhane/panda3d,jjkoletar/panda3d,cc272309126/panda3d,tobspr/panda3d,tobspr/panda3d,matthiascy/panda3d,mgracer48/panda3d,matthiascy/panda3d,brakhane/panda3d,ee08b397/panda3d,ee08b397/panda3d,ee08b397/panda3d,cc272309126/panda3d,chandler14362/panda3d,Wilee999/panda3d,matthiascy/panda3d,Wilee999/panda3d,jjkoletar/panda3d,cc272309126/panda3d,matthiascy/panda3d,tobspr/panda3d,ee08b397/panda3d,tobspr/panda3d,hj3938/panda3d,hj3938/panda3d,mgracer48/panda3d,Wilee999/panda3d,cc272309126/panda3d,tobspr/panda3d,cc272309126/panda3d,chandler14362/panda3d,brakhane/panda3d,hj3938/panda3d,matthiascy/panda3d,matthiascy/panda3d,jjkoletar/panda3d,jjkoletar/panda3d
|
---
+++
@@ -1,5 +1,6 @@
""" This module is now vestigial. """
+from Tkinter import *
import sys, Pmw
# This is required by the ihooks.py module used by Squeeze (used by
|
72de9e47015e2018ca13c6d4681a79e53c2d5475
|
brabeion/models.py
|
brabeion/models.py
|
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
class BadgeAward(models.Model):
user = models.ForeignKey(User, related_name="badges_earned")
awarded_at = models.DateTimeField(default=datetime.now)
slug = models.CharField(max_length=255)
level = models.IntegerField()
def __getattr__(self, attr):
return getattr(self._badge, attr)
@property
def badge(self):
return self
@property
def _badge(self):
from brabeion import badges
return badges._registry[self.slug]
@property
def name(self):
return self._badge.levels[self.level].name
@property
def description(self):
return self._badge.levels[self.level].description
@property
def progress(self):
return self._badge.progress(self.user, self.level)
|
from datetime import datetime
from django.contrib.auth.models import User
from django.db import models
from django.utils import timezone
class BadgeAward(models.Model):
user = models.ForeignKey(User, related_name="badges_earned")
awarded_at = models.DateTimeField(default=timezone.now)
slug = models.CharField(max_length=255)
level = models.IntegerField()
def __getattr__(self, attr):
return getattr(self._badge, attr)
@property
def badge(self):
return self
@property
def _badge(self):
from brabeion import badges
return badges._registry[self.slug]
@property
def name(self):
return self._badge.levels[self.level].name
@property
def description(self):
return self._badge.levels[self.level].description
@property
def progress(self):
return self._badge.progress(self.user, self.level)
|
Use timezone-aware dates with BadgeAward if desired
|
Use timezone-aware dates with BadgeAward if desired
|
Python
|
bsd-3-clause
|
kinsights/brabeion
|
---
+++
@@ -1,14 +1,14 @@
from datetime import datetime
+from django.contrib.auth.models import User
from django.db import models
-
-from django.contrib.auth.models import User
+from django.utils import timezone
class BadgeAward(models.Model):
user = models.ForeignKey(User, related_name="badges_earned")
- awarded_at = models.DateTimeField(default=datetime.now)
+ awarded_at = models.DateTimeField(default=timezone.now)
slug = models.CharField(max_length=255)
level = models.IntegerField()
|
6eea9e787107a83be36b03d93cddfe7fdf1e9e05
|
tools/skp/page_sets/skia_amazon_desktop.py
|
tools/skp/page_sets/skia_amazon_desktop.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path = 'data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_amazon_desktop.json'
class SkiaAmazonDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaAmazonDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_amazon_desktop.json')
urls_list = [
# Why: #1 world commerce website by visits; #3 commerce in the US by time
# spent.
'http://www.amazon.com',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
page_set=page_set,
credentials_path = 'data/credentials.json')
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_amazon_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.NavigateToPage(self)
action_runner.Wait(15)
class SkiaAmazonDesktopPageSet(page_set_module.PageSet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaAmazonDesktopPageSet, self).__init__(
user_agent_type='desktop',
archive_data_file='data/skia_amazon_desktop.json')
urls_list = [
# Why: #1 world commerce website by visits; #3 commerce in the US by time
# spent.
'http://www.amazon.com',
]
for url in urls_list:
self.AddPage(SkiaBuildbotDesktopPage(url, self))
|
Add wait to amazon page set to avoid tab crashes
|
Add wait to amazon page set to avoid tab crashes
BUG=skia:3049
TBR=borenet
NOTRY=true
Review URL: https://codereview.chromium.org/686133002
|
Python
|
bsd-3-clause
|
HalCanary/skia-hc,samuelig/skia,noselhq/skia,spezi77/android_external_skia,w3nd1go/android_external_skia,geekboxzone/mmallow_external_skia,YUPlayGodDev/platform_external_skia,AOSP-YU/platform_external_skia,PAC-ROM/android_external_skia,Infinitive-OS/platform_external_skia,UBERMALLOW/external_skia,scroggo/skia,AOSPB/external_skia,chenlian2015/skia_from_google,rubenvb/skia,shahrzadmn/skia,VRToxin-AOSP/android_external_skia,PAC-ROM/android_external_skia,TeamTwisted/external_skia,w3nd1go/android_external_skia,google/skia,PAC-ROM/android_external_skia,jtg-gg/skia,vanish87/skia,qrealka/skia-hc,shahrzadmn/skia,AOSP-YU/platform_external_skia,DiamondLovesYou/skia-sys,noselhq/skia,vanish87/skia,Hikari-no-Tenshi/android_external_skia,TeamTwisted/external_skia,ominux/skia,TeamExodus/external_skia,DiamondLovesYou/skia-sys,geekboxzone/mmallow_external_skia,HalCanary/skia-hc,amyvmiwei/skia,Hikari-no-Tenshi/android_external_skia,PAC-ROM/android_external_skia,vanish87/skia,UBERMALLOW/external_skia,qrealka/skia-hc,todotodoo/skia,rubenvb/skia,MinimalOS-AOSP/platform_external_skia,OneRom/external_skia,boulzordev/android_external_skia,UBERMALLOW/external_skia,nfxosp/platform_external_skia,tmpvar/skia.cc,noselhq/skia,Igalia/skia,HalCanary/skia-hc,pcwalton/skia,nfxosp/platform_external_skia,BrokenROM/external_skia,samuelig/skia,tmpvar/skia.cc,Jichao/skia,rubenvb/skia,nvoron23/skia,YUPlayGodDev/platform_external_skia,AOSP-YU/platform_external_skia,timduru/platform-external-skia,jtg-gg/skia,DiamondLovesYou/skia-sys,amyvmiwei/skia,AOSPB/external_skia,jtg-gg/skia,AOSPB/external_skia,todotodoo/skia,geekboxzone/mmallow_external_skia,nvoron23/skia,pcwalton/skia,nfxosp/platform_external_skia,TeamExodus/external_skia,TeamTwisted/external_skia,timduru/platform-external-skia,TeamExodus/external_skia,todotodoo/skia,pcwalton/skia,BrokenROM/external_skia,Infinitive-OS/platform_external_skia,pcwalton/skia,shahrzadmn/skia,VRToxin-AOSP/android_external_skia,scroggo/skia,nfxosp/platform_external_skia,Monke
yZZZZ/platform_external_skia,geekboxzone/mmallow_external_skia,Igalia/skia,qrealka/skia-hc,TeamExodus/external_skia,rubenvb/skia,samuelig/skia,samuelig/skia,YUPlayGodDev/platform_external_skia,vanish87/skia,tmpvar/skia.cc,HalCanary/skia-hc,TeamExodus/external_skia,nvoron23/skia,BrokenROM/external_skia,ominux/skia,ominux/skia,BrokenROM/external_skia,Hikari-no-Tenshi/android_external_skia,DiamondLovesYou/skia-sys,google/skia,nvoron23/skia,MinimalOS-AOSP/platform_external_skia,amyvmiwei/skia,w3nd1go/android_external_skia,boulzordev/android_external_skia,tmpvar/skia.cc,qrealka/skia-hc,qrealka/skia-hc,shahrzadmn/skia,MonkeyZZZZ/platform_external_skia,rubenvb/skia,boulzordev/android_external_skia,spezi77/android_external_skia,invisiblek/android_external_skia,ominux/skia,MonkeyZZZZ/platform_external_skia,MarshedOut/android_external_skia,invisiblek/android_external_skia,google/skia,HalCanary/skia-hc,MarshedOut/android_external_skia,rubenvb/skia,invisiblek/android_external_skia,noselhq/skia,aosp-mirror/platform_external_skia,nvoron23/skia,Igalia/skia,Jichao/skia,qrealka/skia-hc,HalCanary/skia-hc,tmpvar/skia.cc,AOSP-YU/platform_external_skia,AOSP-YU/platform_external_skia,Infinitive-OS/platform_external_skia,amyvmiwei/skia,w3nd1go/android_external_skia,shahrzadmn/skia,Infinitive-OS/platform_external_skia,chenlian2015/skia_from_google,todotodoo/skia,nfxosp/platform_external_skia,AOSP-YU/platform_external_skia,OneRom/external_skia,rubenvb/skia,nfxosp/platform_external_skia,geekboxzone/mmallow_external_skia,DiamondLovesYou/skia-sys,boulzordev/android_external_skia,YUPlayGodDev/platform_external_skia,samuelig/skia,invisiblek/android_external_skia,tmpvar/skia.cc,google/skia,chenlian2015/skia_from_google,timduru/platform-external-skia,MinimalOS-AOSP/platform_external_skia,UBERMALLOW/external_skia,MinimalOS-AOSP/platform_external_skia,UBERMALLOW/external_skia,chenlian2015/skia_from_google,OneRom/external_skia,OneRom/external_skia,TeamTwisted/external_skia,UBERMALLOW/external_skia,Mi
nimalOS-AOSP/platform_external_skia,AOSPB/external_skia,YUPlayGodDev/platform_external_skia,MonkeyZZZZ/platform_external_skia,BrokenROM/external_skia,geekboxzone/mmallow_external_skia,TeamExodus/external_skia,google/skia,OneRom/external_skia,MarshedOut/android_external_skia,VRToxin-AOSP/android_external_skia,MarshedOut/android_external_skia,VRToxin-AOSP/android_external_skia,MinimalOS-AOSP/platform_external_skia,PAC-ROM/android_external_skia,pcwalton/skia,timduru/platform-external-skia,vanish87/skia,Igalia/skia,scroggo/skia,noselhq/skia,HalCanary/skia-hc,amyvmiwei/skia,invisiblek/android_external_skia,HalCanary/skia-hc,AOSP-YU/platform_external_skia,boulzordev/android_external_skia,aosp-mirror/platform_external_skia,vanish87/skia,ominux/skia,MinimalOS-AOSP/platform_external_skia,Infinitive-OS/platform_external_skia,scroggo/skia,TeamTwisted/external_skia,jtg-gg/skia,MarshedOut/android_external_skia,boulzordev/android_external_skia,nfxosp/platform_external_skia,Hikari-no-Tenshi/android_external_skia,ominux/skia,w3nd1go/android_external_skia,w3nd1go/android_external_skia,aosp-mirror/platform_external_skia,vanish87/skia,TeamExodus/external_skia,google/skia,YUPlayGodDev/platform_external_skia,chenlian2015/skia_from_google,MarshedOut/android_external_skia,samuelig/skia,boulzordev/android_external_skia,noselhq/skia,BrokenROM/external_skia,Jichao/skia,VRToxin-AOSP/android_external_skia,nfxosp/platform_external_skia,w3nd1go/android_external_skia,todotodoo/skia,UBERMALLOW/external_skia,Infinitive-OS/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,DiamondLovesYou/skia-sys,google/skia,google/skia,shahrzadmn/skia,Igalia/skia,MonkeyZZZZ/platform_external_skia,Hikari-no-Tenshi/android_external_skia,tmpvar/skia.cc,boulzordev/android_external_skia,UBERMALLOW/external_skia,AOSPB/external_skia,timduru/platform-external-skia,shahrzadmn/skia,DiamondLovesYou/skia-sys,MonkeyZZZZ/platform_external_skia,VRToxin-AOSP/android_external_skia,todotodo
o/skia,TeamTwisted/external_skia,VRToxin-AOSP/android_external_skia,UBERMALLOW/external_skia,Igalia/skia,google/skia,nvoron23/skia,PAC-ROM/android_external_skia,spezi77/android_external_skia,invisiblek/android_external_skia,scroggo/skia,google/skia,scroggo/skia,scroggo/skia,noselhq/skia,OneRom/external_skia,invisiblek/android_external_skia,invisiblek/android_external_skia,nvoron23/skia,TeamTwisted/external_skia,PAC-ROM/android_external_skia,OneRom/external_skia,YUPlayGodDev/platform_external_skia,YUPlayGodDev/platform_external_skia,jtg-gg/skia,jtg-gg/skia,pcwalton/skia,MarshedOut/android_external_skia,Jichao/skia,TeamTwisted/external_skia,samuelig/skia,Infinitive-OS/platform_external_skia,geekboxzone/mmallow_external_skia,Jichao/skia,HalCanary/skia-hc,BrokenROM/external_skia,VRToxin-AOSP/android_external_skia,rubenvb/skia,Jichao/skia,jtg-gg/skia,Hikari-no-Tenshi/android_external_skia,Jichao/skia,pcwalton/skia,noselhq/skia,TeamTwisted/external_skia,vanish87/skia,BrokenROM/external_skia,MarshedOut/android_external_skia,timduru/platform-external-skia,qrealka/skia-hc,VRToxin-AOSP/android_external_skia,spezi77/android_external_skia,MarshedOut/android_external_skia,nvoron23/skia,AOSPB/external_skia,ominux/skia,aosp-mirror/platform_external_skia,scroggo/skia,todotodoo/skia,spezi77/android_external_skia,Hikari-no-Tenshi/android_external_skia,PAC-ROM/android_external_skia,aosp-mirror/platform_external_skia,todotodoo/skia,MonkeyZZZZ/platform_external_skia,samuelig/skia,rubenvb/skia,aosp-mirror/platform_external_skia,MonkeyZZZZ/platform_external_skia,YUPlayGodDev/platform_external_skia,amyvmiwei/skia,aosp-mirror/platform_external_skia,ominux/skia,pcwalton/skia,Igalia/skia,Hikari-no-Tenshi/android_external_skia,amyvmiwei/skia,Infinitive-OS/platform_external_skia,shahrzadmn/skia,tmpvar/skia.cc,vanish87/skia,w3nd1go/android_external_skia,MinimalOS-AOSP/platform_external_skia,todotodoo/skia,geekboxzone/mmallow_external_skia,MinimalOS-AOSP/platform_external_skia,boulzordev/android_
external_skia,ominux/skia,pcwalton/skia,Jichao/skia,Igalia/skia,tmpvar/skia.cc,TeamExodus/external_skia,Infinitive-OS/platform_external_skia,HalCanary/skia-hc,noselhq/skia,TeamExodus/external_skia,AOSPB/external_skia,AOSPB/external_skia,spezi77/android_external_skia,shahrzadmn/skia,AOSPB/external_skia,nfxosp/platform_external_skia,OneRom/external_skia,chenlian2015/skia_from_google,aosp-mirror/platform_external_skia,Jichao/skia,rubenvb/skia,qrealka/skia-hc,amyvmiwei/skia,OneRom/external_skia,chenlian2015/skia_from_google,MonkeyZZZZ/platform_external_skia,PAC-ROM/android_external_skia,AOSP-YU/platform_external_skia,nvoron23/skia,timduru/platform-external-skia,AOSP-YU/platform_external_skia,geekboxzone/mmallow_external_skia,w3nd1go/android_external_skia
|
---
+++
@@ -18,6 +18,10 @@
self.user_agent_type = 'desktop'
self.archive_data_file = 'data/skia_amazon_desktop.json'
+ def RunNavigateSteps(self, action_runner):
+ action_runner.NavigateToPage(self)
+ action_runner.Wait(15)
+
class SkiaAmazonDesktopPageSet(page_set_module.PageSet):
|
6ab0607e6182f7c4dec55b6318ab07af746e7c89
|
compose/__init__.py
|
compose/__init__.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
__version__ = '1.8.0'
|
from __future__ import absolute_import
from __future__ import unicode_literals
__version__ = '1.9.0dev'
|
Switch back to dev version
|
Switch back to dev version
Signed-off-by: Joffrey F <2e95f49799afcec0080c0aeb8813776d949e0768@docker.com>
|
Python
|
apache-2.0
|
thaJeztah/compose,shin-/compose,andrewgee/compose,thaJeztah/compose,shin-/compose,michael-k/docker-compose,sdurrheimer/compose,dnephin/compose,hoogenm/compose,andrewgee/compose,dbdd4us/compose,swoopla/compose,hoogenm/compose,swoopla/compose,jrabbit/compose,schmunk42/compose,schmunk42/compose,funkyfuture/docker-compose,dbdd4us/compose,vdemeester/compose,sdurrheimer/compose,funkyfuture/docker-compose,michael-k/docker-compose,dnephin/compose,GM-Alex/compose,GM-Alex/compose,vdemeester/compose,jrabbit/compose
|
---
+++
@@ -1,4 +1,4 @@
from __future__ import absolute_import
from __future__ import unicode_literals
-__version__ = '1.8.0'
+__version__ = '1.9.0dev'
|
edd4f01065b7bac6c7400d8c1496375fbe0a9aa5
|
app/timetables/models.py
|
app/timetables/models.py
|
from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
name = models.CharField(max_length=60)
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
|
from __future__ import unicode_literals
from django.db import models
class Weekday(models.Model):
"""Model representing the day of the week."""
name = models.CharField(max_length=60, unique=True)
def clean(self):
"""
Capitalize the first letter of the first word to avoid case
insensitive duplicates for name field.
"""
self.name = self.name.capitalize()
def save(self, *args, **kwargs):
self.clean()
return super(Weekday, self).save(*args, **kwargs)
class Meal(models.Model):
name = models.CharField(max_length=60, unique=True)
start_time = models.TimeField()
end_time = models.TimeField()
def __str__(self):
return self.name
|
Add unique constraint on Meal name field
|
Add unique constraint on Meal name field
|
Python
|
mit
|
teamtaverna/core
|
---
+++
@@ -21,7 +21,7 @@
class Meal(models.Model):
- name = models.CharField(max_length=60)
+ name = models.CharField(max_length=60, unique=True)
start_time = models.TimeField()
end_time = models.TimeField()
|
d2076f6fd3a0bb687224048de904207c885aba5c
|
utils.py
|
utils.py
|
from functools import wraps
def cached_property(f):
name = f.__name__
@property
@wraps(f)
def inner(self):
if not hasattr(self, "_property_cache"):
self._property_cache = {}
if name not in self._property_cache:
self._property_cache[name] = f(self)
return self._property_cache[name]
return inner
class Constant():
def __init__(self, x):
self.x = x
def __repr__(self):
return self.x
|
from functools import wraps
def cached_property(f):
name = f.__name__
@property
@wraps(f)
def inner(self):
if not hasattr(self, "_property_cache"):
self._property_cache = {}
if name not in self._property_cache:
self._property_cache[name] = f(self)
return self._property_cache[name]
return inner
class Constant():
def __init__(self, x):
self.x = x
def __repr__(self):
return self.x
def constants(namespace, names):
for name in names:
namespace[name] = Constant(name)
|
Make it easier to define constants
|
Make it easier to define constants
|
Python
|
unlicense
|
drkitty/python3-base,drkitty/python3-base
|
---
+++
@@ -20,3 +20,8 @@
def __repr__(self):
return self.x
+
+
+def constants(namespace, names):
+ for name in names:
+ namespace[name] = Constant(name)
|
07f2de7775cd73e220bebe3d6ea8cfe5604de174
|
utils.py
|
utils.py
|
import errno
import os
import socket
from contextlib import closing
import plumbum
# http://stackoverflow.com/a/166589
# Create a UDP socket to the internet at large to get our routed IP
def get_routed_ip():
with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as s:
s.connect(("8.8.8.8", 53)) # Google DNS, but doesn't really matter
return s.getsockname()[0]
def mkdir(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(path):
# exists already, fine.
pass
else:
raise
def git_status():
git = plumbum.local["git"]
desc = git('describe', '--contains', '--all').strip()
fulldesc = git('describe', '--all', '--long', '--dirty').strip()
fulldate = git('show', '-s', '--format=%ci').strip()
date = fulldate.split()[0]
mods = git['diff', '--no-ext-diff', '--quiet'] & plumbum.TF(1)
# short git description: date plus dirty marker
gitshort = "%s-%s%s" % (desc, date, '-*' if mods else '')
gitlong = "%s\n%s" % (fulldesc, fulldate)
return (gitshort, gitlong)
|
import errno
import os
import socket
from contextlib import closing
import plumbum
# http://stackoverflow.com/a/166589
# Create a UDP socket to the internet at large to get our routed IP
def get_routed_ip():
with closing(socket.socket(socket.AF_INET, socket.SOCK_DGRAM)) as s:
s.connect(("8.8.8.8", 53)) # Google DNS, but doesn't really matter
return s.getsockname()[0]
def mkdir(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno == errno.EEXIST and os.path.isdir(path):
# exists already, fine.
pass
else:
raise
def git_status():
git = plumbum.local["git"]
branch = git('describe', '--contains', '--all', 'HEAD').strip()
fulldesc = git('describe', '--all', '--long', '--dirty').strip()
fulldate = git('show', '-s', '--format=%ci').strip()
date = fulldate.split()[0]
mods = git['diff', '--no-ext-diff', '--quiet'] & plumbum.TF(1)
# short git description: date plus dirty marker
gitshort = "%s-%s%s" % (branch, date, '-*' if mods else '')
gitlong = "%s\n%s" % (fulldesc, fulldate)
return (gitshort, gitlong)
|
Fix branch name getting on older git version.
|
Fix branch name getting on older git version.
|
Python
|
mit
|
liffiton/ATLeS,liffiton/ATLeS,liffiton/ATLeS,liffiton/ATLeS
|
---
+++
@@ -28,13 +28,13 @@
def git_status():
git = plumbum.local["git"]
- desc = git('describe', '--contains', '--all').strip()
+ branch = git('describe', '--contains', '--all', 'HEAD').strip()
fulldesc = git('describe', '--all', '--long', '--dirty').strip()
fulldate = git('show', '-s', '--format=%ci').strip()
date = fulldate.split()[0]
mods = git['diff', '--no-ext-diff', '--quiet'] & plumbum.TF(1)
# short git description: date plus dirty marker
- gitshort = "%s-%s%s" % (desc, date, '-*' if mods else '')
+ gitshort = "%s-%s%s" % (branch, date, '-*' if mods else '')
gitlong = "%s\n%s" % (fulldesc, fulldate)
return (gitshort, gitlong)
|
deb5a6c45d6f52daef7ca5752f574d7c14abbc47
|
admin/base/urls.py
|
admin/base/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(
base_pattern,
include([
url(r'^$', views.home, name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^account/', include('admin.common_auth.urls', namespace='auth')),
url(r'^password/', include('password_reset.urls')),
url(r'^nodes/', include('admin.nodes.urls', namespace='nodes')),
url(r'^users/', include('admin.users.urls', namespace='users')),
url(r'^meetings/', include('admin.meetings.urls',
namespace='meetings')),
url(r'^project/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^metrics/', include('admin.metrics.urls',
namespace='metrics')),
url(r'^desk/', include('admin.desk.urls',
namespace='desk')),
]),
),
]
admin.site.site_header = 'OSF-Admin administration'
|
from django.conf.urls import include, url
from django.contrib import admin
from settings import ADMIN_BASE
from . import views
base_pattern = '^{}'.format(ADMIN_BASE)
urlpatterns = [
### ADMIN ###
url(
base_pattern,
include([
url(r'^$', views.home, name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^spam/', include('admin.spam.urls', namespace='spam')),
url(r'^account/', include('admin.common_auth.urls', namespace='auth')),
url(r'^password/', include('password_reset.urls')),
url(r'^nodes/', include('admin.nodes.urls', namespace='nodes')),
url(r'^preprints/', include('admin.preprints.urls', namespace='preprints')),
url(r'^users/', include('admin.users.urls', namespace='users')),
url(r'^meetings/', include('admin.meetings.urls',
namespace='meetings')),
url(r'^project/', include('admin.pre_reg.urls', namespace='pre_reg')),
url(r'^metrics/', include('admin.metrics.urls',
namespace='metrics')),
url(r'^desk/', include('admin.desk.urls',
namespace='desk')),
]),
),
]
admin.site.site_header = 'OSF-Admin administration'
|
Add preprints to the sidebar
|
Add preprints to the sidebar
[#OSF-7198]
|
Python
|
apache-2.0
|
mattclark/osf.io,caseyrollins/osf.io,aaxelb/osf.io,icereval/osf.io,felliott/osf.io,cwisecarver/osf.io,adlius/osf.io,crcresearch/osf.io,caneruguz/osf.io,cslzchen/osf.io,pattisdr/osf.io,leb2dg/osf.io,mattclark/osf.io,mfraezz/osf.io,caseyrollins/osf.io,baylee-d/osf.io,chrisseto/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,aaxelb/osf.io,cslzchen/osf.io,adlius/osf.io,laurenrevere/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,felliott/osf.io,mfraezz/osf.io,saradbowman/osf.io,hmoco/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,binoculars/osf.io,felliott/osf.io,chennan47/osf.io,TomBaxter/osf.io,hmoco/osf.io,leb2dg/osf.io,felliott/osf.io,baylee-d/osf.io,hmoco/osf.io,cslzchen/osf.io,cslzchen/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,caneruguz/osf.io,caneruguz/osf.io,leb2dg/osf.io,erinspace/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,icereval/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,crcresearch/osf.io,sloria/osf.io,icereval/osf.io,Nesiehr/osf.io,sloria/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,TomBaxter/osf.io,binoculars/osf.io,binoculars/osf.io,baylee-d/osf.io,adlius/osf.io,laurenrevere/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,pattisdr/osf.io,chrisseto/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,chennan47/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,cwisecarver/osf.io,hmoco/osf.io,mfraezz/osf.io,pattisdr/osf.io,sloria/osf.io,Nesiehr/osf.io,mfraezz/osf.io,brianjgeiger/osf.io
|
---
+++
@@ -18,6 +18,7 @@
url(r'^account/', include('admin.common_auth.urls', namespace='auth')),
url(r'^password/', include('password_reset.urls')),
url(r'^nodes/', include('admin.nodes.urls', namespace='nodes')),
+ url(r'^preprints/', include('admin.preprints.urls', namespace='preprints')),
url(r'^users/', include('admin.users.urls', namespace='users')),
url(r'^meetings/', include('admin.meetings.urls',
namespace='meetings')),
|
5681e5ee342ee4660e634a2081d499940a3b0d3f
|
pytest_pycharm.py
|
pytest_pycharm.py
|
# -*- coding: utf-8 -*-
import threading
def pytest_exception_interact(node, call, report):
"""
Drop into PyCharm debugger, if available, on uncaught exceptions.
"""
try:
import pydevd
from pydevd import pydevd_tracing
except ImportError:
pass
else:
exctype, value, traceback = call.excinfo._excinfo
frames = []
while traceback:
frames.append(traceback.tb_frame)
traceback = traceback.tb_next
thread = threading.current_thread()
frames_by_id = dict([(id(frame), frame) for frame in frames])
frame = frames[-1]
exception = (exctype, value, traceback)
thread.additional_info.pydev_message = 'test fail'
try:
debugger = pydevd.debugger
except AttributeError:
debugger = pydevd.get_global_debugger()
pydevd_tracing.SetTrace(None) # no tracing from here
try:
debugger.stop_on_unhandled_exception(thread, frame, frames_by_id, exception)
except AttributeError:
# fallback to pre PyCharm 2019.2 API
debugger.handle_post_mortem_stop(thread, frame, frames_by_id, exception)
return report
|
# -*- coding: utf-8 -*-
import threading
def pytest_exception_interact(node, call, report):
"""
Drop into PyCharm debugger, if available, on uncaught exceptions.
"""
try:
import pydevd
from pydevd import pydevd_tracing
except ImportError:
pass
else:
exctype, value, traceback = call.excinfo._excinfo
frames = []
while traceback:
frames.append(traceback.tb_frame)
traceback = traceback.tb_next
thread = threading.current_thread()
frames_by_id = dict([(id(frame), frame) for frame in frames])
frame = frames[-1]
exception = (exctype, value, traceback)
if hasattr(thread, "additional_info"):
thread.additional_info.pydev_message = "test fail"
try:
debugger = pydevd.debugger
except AttributeError:
debugger = pydevd.get_global_debugger()
pydevd_tracing.SetTrace(None) # no tracing from here
try:
debugger.stop_on_unhandled_exception(thread, frame, frames_by_id, exception)
except AttributeError:
# fallback to pre PyCharm 2019.2 API
debugger.handle_post_mortem_stop(thread, frame, frames_by_id, exception)
return report
|
Set `pydev_message` only if `additional_info` exists.
|
Set `pydev_message` only if `additional_info` exists.
|
Python
|
bsd-3-clause
|
jlubcke/pytest-pycharm
|
---
+++
@@ -21,7 +21,8 @@
frames_by_id = dict([(id(frame), frame) for frame in frames])
frame = frames[-1]
exception = (exctype, value, traceback)
- thread.additional_info.pydev_message = 'test fail'
+ if hasattr(thread, "additional_info"):
+ thread.additional_info.pydev_message = "test fail"
try:
debugger = pydevd.debugger
except AttributeError:
|
e773239cbf0adee5ca192c10dba14d35275040ff
|
yarn_api_client/__init__.py
|
yarn_api_client/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = '1.0.0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
# -*- coding: utf-8 -*-
__version__ = '2.0.0.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
|
Prepare for next development iteration
|
Prepare for next development iteration
|
Python
|
bsd-3-clause
|
toidi/hadoop-yarn-api-python-client
|
---
+++
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-__version__ = '1.0.0'
+__version__ = '2.0.0.dev0'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
|
7f690132df6672978990589dd584f2101f592389
|
tests/test_helper.py
|
tests/test_helper.py
|
import unittest
from generator.helper import Helper
class TestHelper(unittest.TestCase):
def test_parse_definition_string(self):
data = ' * @var FooClass | BarClass '
class_list = Helper.parse_definition_string(data)
expected = ['FooClass', 'BarClass']
self.assertEqual(expected, class_list)
def test_strip_extra(self):
data = ' * @var FooClass '
class_def = Helper.strip_extra(data)
expected = 'FooClass'
self.assertEqual(expected, class_def)
def test_get_var_name(self):
data = 'FooClass'
var_name = Helper.get_var_name(data)
expected = 'fooClass'
self.assertEquals(expected, var_name)
if __name__ == '__main__':
unittest.main()
|
import unittest
from generator.helper import Helper
class TestHelper(unittest.TestCase):
def test_parse_definition_string(self):
data = ' * @var FooClass | BarClass '
class_list = Helper.parse_definition_string(data)
expected = ['FooClass', 'BarClass']
self.assertEqual(expected, class_list)
def test_strip_extra(self):
data = ' * @var FooClass '
class_def = Helper.strip_extra(data)
expected = 'FooClass'
self.assertEqual(expected, class_def)
def test_get_var_name(self):
data = 'FooClass'
var_name = Helper.get_var_name(data)
expected = 'fooClass'
self.assertEqual(expected, var_name)
if __name__ == '__main__':
unittest.main()
|
Fix a warning about using assertEquals
|
Fix a warning about using assertEquals
|
Python
|
apache-2.0
|
HappyRay/php-di-generator
|
---
+++
@@ -20,7 +20,7 @@
data = 'FooClass'
var_name = Helper.get_var_name(data)
expected = 'fooClass'
- self.assertEquals(expected, var_name)
+ self.assertEqual(expected, var_name)
if __name__ == '__main__':
|
c8af52e91eb5ea40090a4b303e147c2d5d6cf28a
|
cloudbaseinit/shell.py
|
cloudbaseinit/shell.py
|
# Copyright 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from oslo_config import cfg
from oslo_log import log as oslo_logging
from cloudbaseinit import init
from cloudbaseinit.utils import log as logging
CONF = cfg.CONF
LOG = oslo_logging.getLogger(__name__)
def main():
CONF(sys.argv[1:])
logging.setup('cloudbaseinit')
try:
init.InitManager().configure_host()
except Exception as exc:
LOG.exception(exc)
raise
if __name__ == "__main__":
main()
|
# Copyright 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import pythoncom
sys.coinit_flags = pythoncom.COINIT_MULTITHREADED
pythoncom.CoInitializeEx(pythoncom.COINIT_MULTITHREADED)
from oslo_config import cfg
from oslo_log import log as oslo_logging
from cloudbaseinit import init
from cloudbaseinit.utils import log as logging
CONF = cfg.CONF
LOG = oslo_logging.getLogger(__name__)
def main():
CONF(sys.argv[1:])
logging.setup('cloudbaseinit')
try:
init.InitManager().configure_host()
except Exception as exc:
LOG.exception(exc)
raise
if __name__ == "__main__":
main()
|
Fix py3 x64 crash thread related
|
Fix py3 x64 crash thread related
Change-Id: Iac00ea2463df4346ad60a17d0ba9a2af089c87cd
|
Python
|
apache-2.0
|
chialiang-8/cloudbase-init,stackforge/cloudbase-init,openstack/cloudbase-init,stefan-caraiman/cloudbase-init,cmin764/cloudbase-init,alexpilotti/cloudbase-init,ader1990/cloudbase-init
|
---
+++
@@ -13,6 +13,10 @@
# under the License.
import sys
+
+import pythoncom
+sys.coinit_flags = pythoncom.COINIT_MULTITHREADED
+pythoncom.CoInitializeEx(pythoncom.COINIT_MULTITHREADED)
from oslo_config import cfg
from oslo_log import log as oslo_logging
|
30261101fada94b4bd5df0f9c4506a4ac4dd1063
|
examples/sum.py
|
examples/sum.py
|
from numba import d
from numba.decorators import jit as jit
def sum2d(arr):
M, N = arr.shape
result = 0.0
for i in range(M):
for j in range(N):
result += arr[i,j]
return result
csum2d = jit(restype=d, argtypes=[d[:,:]])(sum2d)
from numpy import random
arr = random.randn(100,100)
import time
start = time.time()
res = sum2d(arr)
duration = time.time() - start
print "Result from python is %s in %s (msec)" % (res, duration*1000)
start = time.time()
res = csum2d(arr)
duration2 = time.time() - start
print "Result from compiled is %s in %s (msec)" % (res, duration2*1000)
print "Speed up is %s" % (duration / duration2)
|
from numba import double
from numba.decorators import jit as jit
def sum2d(arr):
M, N = arr.shape
result = 0.0
for i in range(M):
for j in range(N):
result += arr[i,j]
return result
csum2d = jit(restype=double, argtypes=[double[:,:]])(sum2d)
from numpy import random
arr = random.randn(100,100)
import time
start = time.time()
res = sum2d(arr)
duration = time.time() - start
print "Result from python is %s in %s (msec)" % (res, duration*1000)
start = time.time()
res = csum2d(arr)
duration2 = time.time() - start
print "Result from compiled is %s in %s (msec)" % (res, duration2*1000)
print "Speed up is %s" % (duration / duration2)
|
Update type specification in example
|
Update type specification in example
|
Python
|
bsd-2-clause
|
stonebig/numba,seibert/numba,seibert/numba,stefanseefeld/numba,jriehl/numba,ssarangi/numba,IntelLabs/numba,GaZ3ll3/numba,pombredanne/numba,stuartarchibald/numba,gdementen/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,pitrou/numba,gdementen/numba,cpcloud/numba,stefanseefeld/numba,GaZ3ll3/numba,cpcloud/numba,ssarangi/numba,stefanseefeld/numba,numba/numba,numba/numba,pitrou/numba,cpcloud/numba,seibert/numba,sklam/numba,stuartarchibald/numba,pombredanne/numba,shiquanwang/numba,gdementen/numba,jriehl/numba,stefanseefeld/numba,jriehl/numba,stonebig/numba,stonebig/numba,pombredanne/numba,stefanseefeld/numba,shiquanwang/numba,numba/numba,ssarangi/numba,stuartarchibald/numba,stonebig/numba,stonebig/numba,sklam/numba,pitrou/numba,ssarangi/numba,IntelLabs/numba,GaZ3ll3/numba,shiquanwang/numba,pitrou/numba,gmarkall/numba,gmarkall/numba,cpcloud/numba,gdementen/numba,jriehl/numba,ssarangi/numba,pombredanne/numba,cpcloud/numba,gmarkall/numba,stuartarchibald/numba,numba/numba,pitrou/numba,sklam/numba,gmarkall/numba,IntelLabs/numba,pombredanne/numba,sklam/numba,gmarkall/numba,IntelLabs/numba,GaZ3ll3/numba,seibert/numba,jriehl/numba,seibert/numba,gdementen/numba,sklam/numba,GaZ3ll3/numba
|
---
+++
@@ -1,4 +1,4 @@
-from numba import d
+from numba import double
from numba.decorators import jit as jit
def sum2d(arr):
@@ -9,7 +9,7 @@
result += arr[i,j]
return result
-csum2d = jit(restype=d, argtypes=[d[:,:]])(sum2d)
+csum2d = jit(restype=double, argtypes=[double[:,:]])(sum2d)
from numpy import random
arr = random.randn(100,100)
|
bf8ddb49a1043f399a210e05e66e2db4d815cc22
|
tests/test_build_chess.py
|
tests/test_build_chess.py
|
# -*- coding: utf-8 -*-
from app.chess.chess import Chess
import unittest
class TestBuildChess(unittest.TestCase):
"""
`TestBuildChess()` class is unit-testing the class
Chess().
"""
# ///////////////////////////////////////////////////
def setUp(self):
params = [4, 4]
pieces = {'King': 2, 'Queen': 1, 'Bishop': 0, 'Rook': 0, 'Knight': 0}
params.append(pieces)
self.chess = Chess(params)
# ///////////////////////////////////////////////////
def test_solve(self):
"""Tests validity of solution"""
self.assertEqual(self.chess.pieces_types == ['K', 'K', 'Q'], True)
self.assertEqual(self.chess.number_pieces == 3, True)
# self.assertEqual(self.chess.solutions == 1, True)
def test_solution_only_kings(self):
params = [5, 5]
pieces = {'King': 2, 'Queen': 0, 'Bishop': 0, 'Rook': 0, 'Knight': 0}
params.append(pieces)
self.chess = Chess(params)
self.chess.run_game()
self.assertEqual(self.chess.solutions == 228, True)
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
from chess.chess import Chess
import unittest
class TestBuildChess(unittest.TestCase):
"""
`TestBuildChess()` class is unit-testing the class
Chess().
"""
# ///////////////////////////////////////////////////
def setUp(self):
params = [4, 4]
pieces = {'King': 2, 'Queen': 1, 'Bishop': 1, 'Rook': 1, 'Knight': 1}
params.append(pieces)
self.chess = Chess(params)
# ///////////////////////////////////////////////////
def test_build_chess(self):
"""Tests validity of build chessboard"""
self.assertEqual(self.chess.pieces_types == ['K', 'K', 'Q'], True)
self.assertEqual(self.chess.number_pieces == 6, True)
def test_solution_only_kings(self):
params = [5, 5]
pieces = {'King': 2, 'Queen': 0, 'Bishop': 0, 'Rook': 0, 'Knight': 0}
params.append(pieces)
self.chess = Chess(params)
self.chess.run_game()
self.assertEqual(self.chess.solutions == 228, True)
def test_solution_only_queens(self):
params = [5, 5]
pieces = {'King': 0, 'Queen': 2, 'Bishop': 0, 'Rook': 0, 'Knight': 0}
params.append(pieces)
self.chess = Chess(params)
self.chess.run_game()
self.assertEqual(self.chess.solutions == 140, True)
if __name__ == '__main__':
unittest.main()
|
Add a TDD function ( queens on chess board)
|
Add a TDD function ( queens on chess board)
|
Python
|
mit
|
aymguesmi/ChessChallenge
|
---
+++
@@ -1,8 +1,9 @@
# -*- coding: utf-8 -*-
-from app.chess.chess import Chess
+from chess.chess import Chess
import unittest
+
class TestBuildChess(unittest.TestCase):
@@ -14,17 +15,16 @@
# ///////////////////////////////////////////////////
def setUp(self):
params = [4, 4]
- pieces = {'King': 2, 'Queen': 1, 'Bishop': 0, 'Rook': 0, 'Knight': 0}
+ pieces = {'King': 2, 'Queen': 1, 'Bishop': 1, 'Rook': 1, 'Knight': 1}
params.append(pieces)
self.chess = Chess(params)
# ///////////////////////////////////////////////////
- def test_solve(self):
- """Tests validity of solution"""
+ def test_build_chess(self):
+ """Tests validity of build chessboard"""
self.assertEqual(self.chess.pieces_types == ['K', 'K', 'Q'], True)
- self.assertEqual(self.chess.number_pieces == 3, True)
-# self.assertEqual(self.chess.solutions == 1, True)
-
+ self.assertEqual(self.chess.number_pieces == 6, True)
+
def test_solution_only_kings(self):
params = [5, 5]
pieces = {'King': 2, 'Queen': 0, 'Bishop': 0, 'Rook': 0, 'Knight': 0}
@@ -33,7 +33,13 @@
self.chess.run_game()
self.assertEqual(self.chess.solutions == 228, True)
+ def test_solution_only_queens(self):
+ params = [5, 5]
+ pieces = {'King': 0, 'Queen': 2, 'Bishop': 0, 'Rook': 0, 'Knight': 0}
+ params.append(pieces)
+ self.chess = Chess(params)
+ self.chess.run_game()
+ self.assertEqual(self.chess.solutions == 140, True)
if __name__ == '__main__':
unittest.main()
-
|
639356ecf1b19b8546b5a4691d13a34d4b46d583
|
src/app.py
|
src/app.py
|
import os
import flask
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('index.html')
@app.route('/hello')
def hello():
return 'hello'
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port, debug=True)
|
import os
import json
import random
import flask
from hashlib import md5
records = {}
# Create a hash table of all records.
for record in json.loads(open('data/records-2015.json').read())['records']:
records[md5(str(record['id']).encode('utf-8')).hexdigest()] = record
app = flask.Flask(__name__)
@app.route('/')
def index():
return flask.render_template('index.html')
@app.route('/hello')
def hello():
return 'hello'
@app.route('/random')
def random_record():
record_hash = random.choice(list(records.keys()))
return str(records[record_hash])
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port, debug=True)
|
Load records from JSON & expose them
|
Load records from JSON & expose them
|
Python
|
mit
|
judsonsam/tekautoday,judsonsam/tekautoday,judsonsam/tekautoday,judsonsam/tekautoday
|
---
+++
@@ -1,5 +1,15 @@
import os
+import json
+import random
import flask
+
+from hashlib import md5
+
+records = {}
+
+# Create a hash table of all records.
+for record in json.loads(open('data/records-2015.json').read())['records']:
+ records[md5(str(record['id']).encode('utf-8')).hexdigest()] = record
app = flask.Flask(__name__)
@@ -13,6 +23,12 @@
def hello():
return 'hello'
+
+@app.route('/random')
+def random_record():
+ record_hash = random.choice(list(records.keys()))
+ return str(records[record_hash])
+
if __name__ == '__main__':
- port = int(os.environ.get("PORT", 5000))
+ port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port, debug=True)
|
d97d9294bf470c6e95958b0dc51391830c56a7b3
|
thinglang/parser/values/inline_code.py
|
thinglang/parser/values/inline_code.py
|
from thinglang.lexer.lexical_token import LexicalToken
from thinglang.utils.type_descriptors import ValueType
class InlineCode(LexicalToken, ValueType):
"""
Represents inline C++ code
"""
STATIC = True
def __init__(self, value, source_ref):
super(InlineCode, self).__init__(value, source_ref)
self.children = []
def tree(self, depth):
return self.value
def finalize(self):
pass
|
from thinglang.lexer.lexical_token import LexicalToken
from thinglang.utils.type_descriptors import ValueType
class InlineCode(LexicalToken, ValueType):
"""
Represents inline C++ code
"""
STATIC = True
def __init__(self, value, source_ref):
super(InlineCode, self).__init__(value, source_ref)
self.children = []
self.indent = 0
def tree(self, depth):
return self.value
def finalize(self):
pass
|
Allow inline code to be transpiled
|
Allow inline code to be transpiled
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
---
+++
@@ -12,6 +12,7 @@
def __init__(self, value, source_ref):
super(InlineCode, self).__init__(value, source_ref)
self.children = []
+ self.indent = 0
def tree(self, depth):
return self.value
|
a775cd66de2bbc2e176f946e93fe9c0636cf7115
|
documents/views/utils.py
|
documents/views/utils.py
|
from django.http import HttpResponse
def render_to_mimetype_response(mimetype, filename, outputFile):
response = HttpResponse(mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s\"" % (filename)
f = open(outputFile)
try:
content = f.read()
response.write(content)
finally:
f.close()
return response
|
from django.http import HttpResponse
import os
def render_to_mimetype_response(mimetype, filename, outputFile):
response = HttpResponse(mimetype=mimetype)
response['Content-Disposition'] = "attachment; filename=\"%s\"" % (filename)
f = open(outputFile)
try:
content = f.read()
response.write(content)
finally:
f.close()
# remove the tmp file
os.remove(outputFile)
return response
|
Remove temporary files after delivering them
|
Remove temporary files after delivering them
|
Python
|
agpl-3.0
|
sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer
|
---
+++
@@ -1,4 +1,5 @@
from django.http import HttpResponse
+import os
def render_to_mimetype_response(mimetype, filename, outputFile):
response = HttpResponse(mimetype=mimetype)
@@ -11,5 +12,8 @@
finally:
f.close()
+ # remove the tmp file
+ os.remove(outputFile)
+
return response
|
905a2b0ebc2a7e825f93fe1411dc598524dfd843
|
examples/hello/server.py
|
examples/hello/server.py
|
import os
import avro.protocol
import tornado.web
import tornado.ioloop
import tornavro.server
import tornavro.responder
class HelloResponder(tornavro.responder.Responder):
def hello(self, name):
return 'Hello, %s' % name
proto = open(os.path.join(os.path.dirname(__file__), 'hello.avpr')).read()
proto = avro.protocol.parse(proto)
responder = HelloResponder(proto)
server = tornavro.server.AvroServer(responder)
server.listen(8888)
tornado.ioloop.IOLoop.instance().start()
|
import os
import avro.protocol
import tornado.web
import tornado.ioloop
from tornado.options import define, options
import tornavro.server
import tornavro.responder
define('port', default=8888, help='Listen on this port')
class HelloResponder(tornavro.responder.Responder):
def hello(self, name):
return 'Hello, %s' % name
tornado.options.parse_command_line()
proto = open(os.path.join(os.path.dirname(__file__), 'hello.avpr')).read()
proto = avro.protocol.parse(proto)
responder = HelloResponder(proto)
server = tornavro.server.AvroServer(responder)
server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
|
Make the hello protocol example more user-friendly
|
Make the hello protocol example more user-friendly
|
Python
|
mit
|
richid/tornavro
|
---
+++
@@ -3,9 +3,13 @@
import avro.protocol
import tornado.web
import tornado.ioloop
+from tornado.options import define, options
import tornavro.server
import tornavro.responder
+
+
+define('port', default=8888, help='Listen on this port')
class HelloResponder(tornavro.responder.Responder):
@@ -13,10 +17,12 @@
return 'Hello, %s' % name
+tornado.options.parse_command_line()
+
proto = open(os.path.join(os.path.dirname(__file__), 'hello.avpr')).read()
proto = avro.protocol.parse(proto)
responder = HelloResponder(proto)
server = tornavro.server.AvroServer(responder)
-server.listen(8888)
+server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
|
33e40319b5d670c3fa1a1423bf7eed1865115d5c
|
sitetools/venv_hook/sitecustomize.py
|
sitetools/venv_hook/sitecustomize.py
|
"""
This file serves as a hook into virtualenvs that do NOT have sitetools
installed.
It is added to the $PYTHONPATH by the `dev` command so that new virtualenvs
can refer to the sitetools from the old virtualenv.
It tries to play nice by looking for the next sitecustomize module.
"""
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
"""
This file serves as a hook into virtualenvs that do NOT have sitetools
installed.
It is added to the $PYTHONPATH by the `dev` command so that new virtualenvs
can refer to the sitetools from the old virtualenv.
It tries to play nice by looking for the next sitecustomize module.
"""
import imp
import os
import sys
import warnings
try:
try:
import sitetools._startup
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
sys.path.append(os.path.abspath(os.path.join(
__file__,
'..', '..', '..'
)))
# Let this ImportError raise.
import sitetools._startup
except Exception as e:
warnings.warn("Error while importing sitetools._startup: %s" % e)
# Be a good citizen and find the next sitecustomize module.
my_path = os.path.dirname(os.path.abspath(__file__))
clean_path = [x for x in sys.path if os.path.abspath(x) != my_path]
try:
args = imp.find_module('sitecustomize', clean_path)
except ImportError:
pass
else:
imp.load_module('sitecustomize', *args)
|
Adjust venv_hook to work in new location
|
Adjust venv_hook to work in new location
|
Python
|
bsd-3-clause
|
westernx/sitetools,westernx/sitetools,mikeboers/sitetools
|
---
+++
@@ -22,7 +22,10 @@
except ImportError:
# Pull in the sitetools that goes with this sitecustomize.
- sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
+ sys.path.append(os.path.abspath(os.path.join(
+ __file__,
+ '..', '..', '..'
+ )))
# Let this ImportError raise.
import sitetools._startup
|
b9eb6ac32a12ef912edc237409c0433cd139aaf6
|
tapioca_harvest/tapioca_harvest.py
|
tapioca_harvest/tapioca_harvest.py
|
# coding: utf-8
from tapioca import (
TapiocaAdapter, generate_wrapper_from_adapter, JSONAdapterMixin)
from requests.auth import HTTPBasicAuth
from .resource_mapping import RESOURCE_MAPPING
class HarvestClientAdapter(JSONAdapterMixin, TapiocaAdapter):
resource_mapping = RESOURCE_MAPPING
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(HarvestClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
params['auth'] = HTTPBasicAuth(
api_params.get('user'), api_params.get('password'))
params['headers'] = params.get('headers', {})
params['headers']['Accept'] = 'application/json'
return params
def get_iterator_list(self, response_data):
return response_data
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
pass
def response_to_native(self, response):
if response.content.strip():
return super(HarvestClientAdapter, self).response_to_native(response)
def get_api_root(self, api_params):
return 'https://' + api_params['company_name'] + '.harvestapp.com/'
Harvest = generate_wrapper_from_adapter(HarvestClientAdapter)
|
# coding: utf-8
from tapioca import (
TapiocaAdapter, generate_wrapper_from_adapter, JSONAdapterMixin)
from requests.auth import HTTPBasicAuth
from .resource_mapping import RESOURCE_MAPPING
class HarvestClientAdapter(JSONAdapterMixin, TapiocaAdapter):
resource_mapping = RESOURCE_MAPPING
api_root = 'https://api.harvestapp.com/v2/'
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(HarvestClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
headers = {
'Authorization': 'Bearer %s' % params.get('token', ''),
'Harvest-Account-Id': params.get('account_id', ''),
'User-Agent': params.get('user_agent', '')
}
params['headers'] = params.get('headers', headers)
params['headers']['Accept'] = 'application/json'
return params
def get_iterator_list(self, response_data):
return response_data
def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
response_data, response):
pass
def response_to_native(self, response):
if response.content.strip():
return super(HarvestClientAdapter, self).response_to_native(response)
Harvest = generate_wrapper_from_adapter(HarvestClientAdapter)
|
Change auth method from password to token on adapter
|
Change auth method from password to token on adapter
|
Python
|
mit
|
vintasoftware/tapioca-harvest
|
---
+++
@@ -11,15 +11,19 @@
class HarvestClientAdapter(JSONAdapterMixin, TapiocaAdapter):
resource_mapping = RESOURCE_MAPPING
+ api_root = 'https://api.harvestapp.com/v2/'
def get_request_kwargs(self, api_params, *args, **kwargs):
params = super(HarvestClientAdapter, self).get_request_kwargs(
api_params, *args, **kwargs)
- params['auth'] = HTTPBasicAuth(
- api_params.get('user'), api_params.get('password'))
+ headers = {
+ 'Authorization': 'Bearer %s' % params.get('token', ''),
+ 'Harvest-Account-Id': params.get('account_id', ''),
+ 'User-Agent': params.get('user_agent', '')
+ }
- params['headers'] = params.get('headers', {})
+ params['headers'] = params.get('headers', headers)
params['headers']['Accept'] = 'application/json'
return params
@@ -35,8 +39,5 @@
if response.content.strip():
return super(HarvestClientAdapter, self).response_to_native(response)
- def get_api_root(self, api_params):
- return 'https://' + api_params['company_name'] + '.harvestapp.com/'
-
Harvest = generate_wrapper_from_adapter(HarvestClientAdapter)
|
1ad4dba5d2dcfdfc9062f334204bd75b789b3ba6
|
webapp/calendars/forms.py
|
webapp/calendars/forms.py
|
from django import forms
from django.contrib.admin import widgets
from datetimewidget.widgets import DateTimeWidget
from .models import Event
class LoginForm(forms.Form):
username = forms.CharField(label='Nazwa użytkownika')
password = forms.CharField(label='Hasło', widget=forms.PasswordInput())
data_time_options = {
'format': 'dd-mm-yyyy HH:ii'
}
def dt_widget():
return DateTimeWidget(
bootstrap_version=3,
usel10n=True,
options=data_time_options
)
class EventForm(forms.ModelForm):
class Meta:
model = Event
fields = (
'title', 'description',
'categories', 'start_time',
'end_time', 'image', 'place',
)
widgets = {
'start_time': dt_widget(),
'end_time': dt_widget(),
}
|
from django import forms
from django.contrib.admin import widgets
from datetimewidget.widgets import DateTimeWidget
from .models import Event
class LoginForm(forms.Form):
username = forms.CharField(label='Nazwa użytkownika')
password = forms.CharField(label='Hasło', widget=forms.PasswordInput())
data_time_options = {
'format': 'dd-mm-yyyy HH:ii'
}
def dt_widget():
return DateTimeWidget(
bootstrap_version=3,
usel10n=True,
options=data_time_options
)
class EventForm(forms.ModelForm):
class Meta:
model = Event
fields = (
'title', 'place',
'description', 'categories',
'start_time', 'end_time',
'image', 'url',
)
widgets = {
'start_time': dt_widget(),
'end_time': dt_widget(),
}
|
Change fields order and add field url.
|
Change fields order and add field url.
Signed-off-by: Mariusz Fik <e22610367d206dca7aa58af34ebf008b556228c5@fidano.pl>
|
Python
|
agpl-3.0
|
Fisiu/calendar-oswiecim,hackerspace-silesia/calendar-oswiecim,firemark/calendar-oswiecim,Fisiu/calendar-oswiecim,Fisiu/calendar-oswiecim,hackerspace-silesia/calendar-oswiecim,firemark/calendar-oswiecim,hackerspace-silesia/calendar-oswiecim,firemark/calendar-oswiecim
|
---
+++
@@ -26,9 +26,10 @@
class Meta:
model = Event
fields = (
- 'title', 'description',
- 'categories', 'start_time',
- 'end_time', 'image', 'place',
+ 'title', 'place',
+ 'description', 'categories',
+ 'start_time', 'end_time',
+ 'image', 'url',
)
widgets = {
'start_time': dt_widget(),
|
67d7ce2d9e8ffe26f5f5a97aca9cfb99c8914f3e
|
us_ignite/common/tests/utils.py
|
us_ignite/common/tests/utils.py
|
from django.core.urlresolvers import reverse
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
|
from django.core.urlresolvers import reverse
from django.contrib.messages.storage.base import BaseStorage, Message
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
class TestMessagesBackend(BaseStorage):
def __init__(self, request, *args, **kwargs):
self._loaded_data = []
super(TestMessagesBackend, self).__init__(request, *args, **kwargs)
def add(self, level, message, extra_tags=''):
self._loaded_data.append(
Message(level, message, extra_tags=extra_tags))
|
Add ``TestMessagesBackend`` for testing ``django.contrib.messages``.
|
Add ``TestMessagesBackend`` for testing ``django.contrib.messages``.
When unit testing a django view the ``messages`` middleware
will be missing. This backend will provision a simple
messaging midleware.
Usage::
from django.test import client
from us_ignite.common.tests import utils
factory = client.RequestFactory()
request = factory.get('/')
request._messages = utils.TestMessagesBackend(request)
The messages will be added to this simple backend.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
---
+++
@@ -1,6 +1,17 @@
from django.core.urlresolvers import reverse
+from django.contrib.messages.storage.base import BaseStorage, Message
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
+
+
+class TestMessagesBackend(BaseStorage):
+ def __init__(self, request, *args, **kwargs):
+ self._loaded_data = []
+ super(TestMessagesBackend, self).__init__(request, *args, **kwargs)
+
+ def add(self, level, message, extra_tags=''):
+ self._loaded_data.append(
+ Message(level, message, extra_tags=extra_tags))
|
1104ef0db6b174c64aa9ddad4df10a790fda13cf
|
grammpy/StringGrammar.py
|
grammpy/StringGrammar.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .RawGrammar import RawGrammar as Grammar
class StringGrammar(Grammar):
@staticmethod
def __to_string_arr(t):
if isinstance(t, str):
return [t]
return t
def remove_term(self, term=None):
return super().remove_term(StringGrammar.__to_string_arr(term))
def add_term(self, term):
return super().add_term(StringGrammar.__to_string_arr(term))
def term(self, term=None):
return super().term(StringGrammar.__to_string_arr(term))
def get_term(self, term=None):
res = super().get_term(StringGrammar.__to_string_arr(term))
if isinstance(term, str):
return res[0]
return res
def have_term(self, term):
return super().have_term(StringGrammar.__to_string_arr(term))
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""
from .RawGrammar import RawGrammar as Grammar
class StringGrammar(Grammar):
@staticmethod
def __to_string_arr(t):
if isinstance(t, str):
return [t]
return t
def remove_term(self, term=None):
return super().remove_term(StringGrammar.__to_string_arr(term))
def add_term(self, term):
return super().add_term(StringGrammar.__to_string_arr(term))
def term(self, term=None):
return self.get_term(term)
def get_term(self, term=None):
res = super().get_term(StringGrammar.__to_string_arr(term))
if isinstance(term, str):
return res[0]
return res
def have_term(self, term):
return super().have_term(StringGrammar.__to_string_arr(term))
|
Fix return of Terminal instance when term method accept string
|
Fix return of Terminal instance when term method accept string
|
Python
|
mit
|
PatrikValkovic/grammpy
|
---
+++
@@ -24,7 +24,7 @@
return super().add_term(StringGrammar.__to_string_arr(term))
def term(self, term=None):
- return super().term(StringGrammar.__to_string_arr(term))
+ return self.get_term(term)
def get_term(self, term=None):
res = super().get_term(StringGrammar.__to_string_arr(term))
|
257315a2e0b3f23db36bb97813849d5cf425f81f
|
murano_tempest_tests/tests/api/service_broker/test_service_broker_negative.py
|
murano_tempest_tests/tests/api/service_broker/test_service_broker_negative.py
|
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import test
from tempest_lib import exceptions
from murano_tempest_tests.tests.api.service_broker import base
from murano_tempest_tests import utils
class ServiceBrokerNegativeTest(base.BaseServiceBrokerAdminTest):
@test.attr(type=['gate', 'negative'])
def test_get_status_with_not_present_instance_id(self):
not_present_instance_id = utils.generate_uuid()
# TODO(freerunner) Tempest REST client can't catch code 410 yet.
# Need to update the test, when tempest-lib will have this code.
self.assertRaises(
exceptions.UnexpectedResponseCode,
self.service_broker_client.get_last_status,
not_present_instance_id)
|
# Copyright (c) 2015 Mirantis, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from tempest import test
from tempest_lib import exceptions
from murano_tempest_tests.tests.api.service_broker import base
from murano_tempest_tests import utils
class ServiceBrokerNegativeTest(base.BaseServiceBrokerAdminTest):
# NOTE(freerunner): Tempest will fail with this test, because its
# _parse_resp function trying to parse a nullable JSON.
# https://review.openstack.org/#/c/260659/
# XFail until this one merged and tempest-lib released.
@unittest.expectedFailure
@test.attr(type=['gate', 'negative'])
def test_get_status_with_not_present_instance_id(self):
not_present_instance_id = utils.generate_uuid()
self.assertRaises(
exceptions.Gone,
self.service_broker_client.get_last_status,
not_present_instance_id)
|
Adjust '410 Gone' exception in service broker negative tests
|
Adjust '410 Gone' exception in service broker negative tests
Tempest-lib 0.12.0 contains necessary code for checking correct exception.
This patch replaces 'UnexpectedResponceCode' exception to correct 'Gone' exception.
Aslo, this patch add expectedFail for negative test.
Change-Id: Ib460b6fc495060a1bd6dd7d0443ff983f8e9f06b
Related-Bug: #1527949
|
Python
|
apache-2.0
|
openstack/murano,satish-avninetworks/murano,olivierlemasle/murano,NeCTAR-RC/murano,satish-avninetworks/murano,olivierlemasle/murano,satish-avninetworks/murano,NeCTAR-RC/murano,NeCTAR-RC/murano,NeCTAR-RC/murano,DavidPurcell/murano_temp,DavidPurcell/murano_temp,satish-avninetworks/murano,openstack/murano,olivierlemasle/murano,olivierlemasle/murano,DavidPurcell/murano_temp,DavidPurcell/murano_temp
|
---
+++
@@ -13,6 +13,8 @@
# License for the specific language governing permissions and limitations
# under the License.
+import unittest
+
from tempest import test
from tempest_lib import exceptions
@@ -22,12 +24,15 @@
class ServiceBrokerNegativeTest(base.BaseServiceBrokerAdminTest):
+ # NOTE(freerunner): Tempest will fail with this test, because its
+ # _parse_resp function trying to parse a nullable JSON.
+ # https://review.openstack.org/#/c/260659/
+ # XFail until this one merged and tempest-lib released.
+ @unittest.expectedFailure
@test.attr(type=['gate', 'negative'])
def test_get_status_with_not_present_instance_id(self):
not_present_instance_id = utils.generate_uuid()
- # TODO(freerunner) Tempest REST client can't catch code 410 yet.
- # Need to update the test, when tempest-lib will have this code.
self.assertRaises(
- exceptions.UnexpectedResponseCode,
+ exceptions.Gone,
self.service_broker_client.get_last_status,
not_present_instance_id)
|
99f61b183da7fb1f7e22377e0347418a852846e3
|
dyfunconn/fc/crosscorr.py
|
dyfunconn/fc/crosscorr.py
|
# -*- coding: utf-8 -*-
""" Cross Correlation
see @https://docs.scipy.org/doc/numpy/reference/generated/numpy.correlate.html
"""
# Author: Avraam Marimpis <avraam.marimpis@gmail.com>
from .estimator import Estimator
from ..analytic_signal import analytic_signal
import numpy as np
def crosscorr(data, fb, fs, pairs=None):
"""
Parameters
----------
Returns
-------
"""
n_channels, _ = np.shape(data)
filtered, _, _ = analytic_signal(data, fb, fs)
r = np.zeros([n_channels, n_channels], dtype=np.float32)
for i in range(n_channels):
for ii in range(n_channels):
r[i, ii] = np.correlate(filtered[i, ], filtered[ii, ], mode='valid')
return r
|
# -*- coding: utf-8 -*-
""" Cross Correlation
see @https://docs.scipy.org/doc/numpy/reference/generated/numpy.correlate.html
"""
# Author: Avraam Marimpis <avraam.marimpis@gmail.com>
from .estimator import Estimator
from ..analytic_signal import analytic_signal
import numpy as np
def crosscorr(data, fb, fs, pairs=None):
"""
Parameters
----------
Returns
-------
"""
n_channels, _ = np.shape(data)
_, _, filtered = analytic_signal(data, fb, fs)
r = np.zeros([n_channels, n_channels], dtype=np.float32)
for i in range(n_channels):
for ii in range(n_channels):
r[i, ii] = np.correlate(filtered[i,], filtered[ii,], mode="valid")
return r
|
Fix the order of the returned values from `analytic_signal`.
|
Fix the order of the returned values from `analytic_signal`.
|
Python
|
bsd-3-clause
|
makism/dyfunconn
|
---
+++
@@ -25,12 +25,12 @@
"""
n_channels, _ = np.shape(data)
- filtered, _, _ = analytic_signal(data, fb, fs)
+ _, _, filtered = analytic_signal(data, fb, fs)
r = np.zeros([n_channels, n_channels], dtype=np.float32)
for i in range(n_channels):
for ii in range(n_channels):
- r[i, ii] = np.correlate(filtered[i, ], filtered[ii, ], mode='valid')
+ r[i, ii] = np.correlate(filtered[i,], filtered[ii,], mode="valid")
return r
|
74dfabb565dbd6581a300091c045067d0398e899
|
source/jormungandr/jormungandr/interfaces/v1/Coverage.py
|
source/jormungandr/jormungandr/interfaces/v1/Coverage.py
|
# coding=utf-8
from flask.ext.restful import Resource, fields, marshal_with
from jormungandr import i_manager
from make_links import add_coverage_link, add_collection_links, clean_links
from converters_collection_type import collections_to_resource_type
from collections import OrderedDict
region_fields = {
"id": fields.String(attribute="region_id"),
"start_production_date": fields.String,
"end_production_date": fields.String,
"status": fields.String,
"shape": fields.String,
}
regions_fields = OrderedDict([
("regions", fields.List(fields.Nested(region_fields)))
])
collections = collections_to_resource_type.keys()
class Coverage(Resource):
@clean_links()
@add_coverage_link()
@add_collection_links(collections)
@marshal_with(regions_fields)
def get(self, region=None, lon=None, lat=None):
return i_manager.regions(region, lon, lat), 200
|
# coding=utf-8
from flask.ext.restful import Resource, fields, marshal_with
from jormungandr import i_manager
from make_links import add_coverage_link, add_coverage_link, add_collection_links, clean_links
from converters_collection_type import collections_to_resource_type
from collections import OrderedDict
from fields import NonNullNested
region_fields = {
"id": fields.String(attribute="region_id"),
"start_production_date": fields.String,
"end_production_date": fields.String,
"status": fields.String,
"shape": fields.String,
"error": NonNullNested({
"code": fields.String,
"value": fields.String
})
}
regions_fields = OrderedDict([
("regions", fields.List(fields.Nested(region_fields)))
])
collections = collections_to_resource_type.keys()
class Coverage(Resource):
@clean_links()
@add_coverage_link()
@add_collection_links(collections)
@marshal_with(regions_fields)
def get(self, region=None, lon=None, lat=None):
return i_manager.regions(region, lon, lat), 200
|
Add error field to region
|
Jormungandr: Add error field to region
|
Python
|
agpl-3.0
|
VincentCATILLON/navitia,prhod/navitia,xlqian/navitia,prhod/navitia,prhod/navitia,xlqian/navitia,ballouche/navitia,is06/navitia,pbougue/navitia,ballouche/navitia,kadhikari/navitia,CanalTP/navitia,VincentCATILLON/navitia,frodrigo/navitia,CanalTP/navitia,pbougue/navitia,francois-vincent/navitia,TeXitoi/navitia,kinnou02/navitia,stifoon/navitia,frodrigo/navitia,Tisseo/navitia,kadhikari/navitia,stifoon/navitia,kinnou02/navitia,kinnou02/navitia,djludo/navitia,kadhikari/navitia,patochectp/navitia,xlqian/navitia,thiphariel/navitia,patochectp/navitia,xlqian/navitia,fueghan/navitia,fueghan/navitia,fueghan/navitia,CanalTP/navitia,lrocheWB/navitia,djludo/navitia,frodrigo/navitia,kadhikari/navitia,francois-vincent/navitia,VincentCATILLON/navitia,datanel/navitia,lrocheWB/navitia,TeXitoi/navitia,francois-vincent/navitia,antoine-de/navitia,ballouche/navitia,is06/navitia,CanalTP/navitia,francois-vincent/navitia,thiphariel/navitia,Tisseo/navitia,lrocheWB/navitia,kinnou02/navitia,VincentCATILLON/navitia,Tisseo/navitia,frodrigo/navitia,antoine-de/navitia,TeXitoi/navitia,patochectp/navitia,TeXitoi/navitia,stifoon/navitia,CanalTP/navitia,pbougue/navitia,antoine-de/navitia,djludo/navitia,is06/navitia,patochectp/navitia,xlqian/navitia,Tisseo/navitia,prhod/navitia,fueghan/navitia,datanel/navitia,djludo/navitia,thiphariel/navitia,lrocheWB/navitia,datanel/navitia,Tisseo/navitia,datanel/navitia,ballouche/navitia,thiphariel/navitia,pbougue/navitia,is06/navitia,antoine-de/navitia,stifoon/navitia
|
---
+++
@@ -1,9 +1,10 @@
# coding=utf-8
from flask.ext.restful import Resource, fields, marshal_with
from jormungandr import i_manager
-from make_links import add_coverage_link, add_collection_links, clean_links
+from make_links import add_coverage_link, add_coverage_link, add_collection_links, clean_links
from converters_collection_type import collections_to_resource_type
from collections import OrderedDict
+from fields import NonNullNested
region_fields = {
@@ -12,6 +13,10 @@
"end_production_date": fields.String,
"status": fields.String,
"shape": fields.String,
+ "error": NonNullNested({
+ "code": fields.String,
+ "value": fields.String
+ })
}
regions_fields = OrderedDict([
("regions", fields.List(fields.Nested(region_fields)))
|
b700e40f65953ea0c87666d38d53e968581611e1
|
auditlog_tests/urls.py
|
auditlog_tests/urls.py
|
import django
from django.conf.urls import include, url
from django.contrib import admin
if django.VERSION < (1, 9):
admin_urls = include(admin.site.urls)
else:
admin_urls = admin.site.urls
urlpatterns = [
url(r'^admin/', admin_urls),
]
|
from django.urls import path
from django.contrib import admin
urlpatterns = [
path("admin/", admin.site.urls),
]
|
Remove old django related codes.
|
Remove old django related codes.
|
Python
|
mit
|
jjkester/django-auditlog
|
---
+++
@@ -1,13 +1,7 @@
-import django
-from django.conf.urls import include, url
+from django.urls import path
from django.contrib import admin
-if django.VERSION < (1, 9):
- admin_urls = include(admin.site.urls)
-else:
- admin_urls = admin.site.urls
-
urlpatterns = [
- url(r'^admin/', admin_urls),
+ path("admin/", admin.site.urls),
]
|
bd878da54d9816779303a0e7ea042c9adaeab993
|
runserver.py
|
runserver.py
|
#!/usr/bin/python
from optparse import OptionParser
from sys import stderr
import pytz
from werkzeug import script
from werkzeug.script import make_runserver
from firmant.wsgi import Application
from firmant.utils import mod_to_dict
from firmant.utils import get_module
parser = OptionParser()
parser.add_option('-s', '--settings',
dest='settings', type='string', default='settings',
help='the settings module to use for the test server.')
parser.add_option('-p', '--port',
dest='port', type='int', default='8080',
help='the port on which to run the test server.')
parser.add_option('-H', '--host',
dest='host', type='string', default='',
help='the host to which the server should bind.')
(options, args) = parser.parse_args()
try:
settings = mod_to_dict(get_module(options.settings))
except ImportError:
stderr.write('Please specify a settings module that can be imported.\n')
exit(1)
def make_app():
return Application(settings)
action_runserver = script.make_runserver(make_app, use_reloader=False)
if __name__ == '__main__':
print 'Starting local WSGI Server'
print 'Please do not use this server for production'
script.run()
|
#!/usr/bin/python
from wsgiref.simple_server import make_server
from optparse import OptionParser
from sys import stderr
import socket
import pytz
from firmant.wsgi import Application
from firmant.utils import mod_to_dict
from firmant.utils import get_module
parser = OptionParser()
parser.add_option('-s', '--settings',
dest='settings', type='string', default='settings',
help='the settings module to use for the test server.')
parser.add_option('-p', '--port',
dest='port', type='int', default='8080',
help='the port on which to run the test server.')
parser.add_option('-H', '--host',
dest='host', type='string', default='',
help='the host to which the server should bind.')
(options, args) = parser.parse_args()
try:
settings = mod_to_dict(get_module(options.settings))
except ImportError:
stderr.write('Please specify a settings module that can be imported.\n')
exit(1)
try:
server = make_server(options.host, options.port, Application(settings))
except socket.error:
stderr.write('Please specify a host/port to which you may bind (the '
'defaults usually work well)\n')
exit(1)
if __name__ == '__main__':
print 'Starting local WSGI Server'
print 'Please do not use this server for production'
print 'Settings: %s' % options.settings
print 'Bound to: http://%s:%i/' % (options.host, options.port)
print '============================================'
server.serve_forever()
|
Revert to old server script (worked better).
|
Revert to old server script (worked better).
The old script did not auto-reload and did display requests made to the
server. The werkzeug-based script's auto-reload feature would mess with
the import magic that powers plugins.
|
Python
|
bsd-3-clause
|
rescrv/firmant
|
---
+++
@@ -1,9 +1,9 @@
#!/usr/bin/python
+from wsgiref.simple_server import make_server
from optparse import OptionParser
from sys import stderr
+import socket
import pytz
-from werkzeug import script
-from werkzeug.script import make_runserver
from firmant.wsgi import Application
from firmant.utils import mod_to_dict
@@ -27,12 +27,18 @@
stderr.write('Please specify a settings module that can be imported.\n')
exit(1)
-def make_app():
- return Application(settings)
+try:
+ server = make_server(options.host, options.port, Application(settings))
+except socket.error:
+ stderr.write('Please specify a host/port to which you may bind (the '
+ 'defaults usually work well)\n')
+ exit(1)
-action_runserver = script.make_runserver(make_app, use_reloader=False)
if __name__ == '__main__':
print 'Starting local WSGI Server'
print 'Please do not use this server for production'
- script.run()
+ print 'Settings: %s' % options.settings
+ print 'Bound to: http://%s:%i/' % (options.host, options.port)
+ print '============================================'
+ server.serve_forever()
|
c91e494f16301789f2ebcc9c245697b379b30eca
|
irco/explorer/filters.py
|
irco/explorer/filters.py
|
import csv
import collections
from StringIO import StringIO
from flask import render_template
from jinja2 import Markup
def init_app(app):
@app.template_filter('format_record')
def format_record(record):
val = record.unparsed_record_value
if 'csv' in record.unparsed_record_format:
data = csv.DictReader(StringIO(val.encode('utf-8')))
data = next(data)
data = [(unicode(k, 'utf-8'), unicode(v, 'utf-8')) for k, v in data.iteritems()]
val = collections.OrderedDict(data)
val = Markup(render_template('dict-table.html', table=val))
return val
|
import csv
import collections
from StringIO import StringIO
from flask import render_template
from jinja2 import Markup
def init_app(app):
@app.template_filter('format_record')
def format_record(record):
val = record.unparsed_record_value
fmt_csv = 'csv' in record.unparsed_record_format
fmt_tsv = 'tsv' in record.unparsed_record_format
if fmt_csv or fmt_tsv:
delimiter = ','
if fmt_tsv:
delimiter = '\t'
data = csv.DictReader(StringIO(val.encode('utf-8')), delimiter=delimiter)
data = next(data)
data = [(unicode(k, 'utf-8'), unicode(v, 'utf-8')) for k, v in data.iteritems()]
val = collections.OrderedDict(data)
val = Markup(render_template('dict-table.html', table=val))
return val
|
Add support to render tsp data in the explorer.
|
Add support to render tsp data in the explorer.
|
Python
|
mit
|
GaretJax/irco,GaretJax/irco,GaretJax/irco,GaretJax/irco
|
---
+++
@@ -10,8 +10,15 @@
@app.template_filter('format_record')
def format_record(record):
val = record.unparsed_record_value
- if 'csv' in record.unparsed_record_format:
- data = csv.DictReader(StringIO(val.encode('utf-8')))
+
+ fmt_csv = 'csv' in record.unparsed_record_format
+ fmt_tsv = 'tsv' in record.unparsed_record_format
+
+ if fmt_csv or fmt_tsv:
+ delimiter = ','
+ if fmt_tsv:
+ delimiter = '\t'
+ data = csv.DictReader(StringIO(val.encode('utf-8')), delimiter=delimiter)
data = next(data)
data = [(unicode(k, 'utf-8'), unicode(v, 'utf-8')) for k, v in data.iteritems()]
val = collections.OrderedDict(data)
|
ecdda95d5d1ed9e3614639c672a860e2467ba3f1
|
django_project/core/settings/prod_docker.py
|
django_project/core/settings/prod_docker.py
|
# noinspection PyUnresolvedReferences
from .prod import * # noqa
import os
print os.environ
ALLOWED_HOSTS = ['*']
ADMINS = (
('Tim Sutton', 'tim@kartoza.com'),
('Ismail Sunni', 'ismail@kartoza.com'),
('Christian Christellis', 'christian@kartoza.com'),
('Akbar Gumbira', 'akbargumbira@gmail.com'),)
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': os.environ['DATABASE_NAME'],
'USER': os.environ['DATABASE_USERNAME'],
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': os.environ['DATABASE_HOST'],
'PORT': 5432,
'TEST_NAME': 'unittests',
}
}
MEDIA_ROOT = '/home/web/media'
STATIC_ROOT = '/home/web/static'
# See fig.yml file for postfix container definition
#
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Host for sending e-mail.
EMAIL_HOST = 'smtp'
# Port for sending e-mail.
EMAIL_PORT = 25
# SMTP authentication information for EMAIL_HOST.
# See fig.yml for where these are defined
EMAIL_HOST_USER = 'noreply@kartoza.com'
EMAIL_HOST_PASSWORD = 'docker'
EMAIL_USE_TLS = False
EMAIL_SUBJECT_PREFIX = '[jakarta-flood-maps]'
|
# noinspection PyUnresolvedReferences
from .prod import * # noqa
import os
print os.environ
ALLOWED_HOSTS = ['*']
ADMINS = (
('Tim Sutton', 'tim@kartoza.com'),
('Ismail Sunni', 'ismail@kartoza.com'),
('Christian Christellis', 'christian@kartoza.com'),
('Akbar Gumbira', 'akbargumbira@gmail.com'),)
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': os.environ['DATABASE_NAME'],
'USER': os.environ['DATABASE_USERNAME'],
'PASSWORD': os.environ['DATABASE_PASSWORD'],
'HOST': os.environ['DATABASE_HOST'],
'PORT': 5432,
'TEST_NAME': 'unittests',
}
}
MEDIA_ROOT = '/home/web/media'
STATIC_ROOT = '/home/web/static'
# See fig.yml file for postfix container definition
#
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Host for sending e-mail.
EMAIL_HOST = 'smtp'
# Port for sending e-mail.
EMAIL_PORT = 25
# SMTP authentication information for EMAIL_HOST.
# See fig.yml for where these are defined
EMAIL_HOST_USER = 'noreply@kartoza.com'
EMAIL_HOST_PASSWORD = 'docker'
EMAIL_USE_TLS = False
EMAIL_SUBJECT_PREFIX = '[InaSAFE]'
|
Use InaSAFE in the email subject line rather
|
Use InaSAFE in the email subject line rather
|
Python
|
bsd-2-clause
|
AIFDR/inasafe-django,timlinux/inasafe-django,AIFDR/inasafe-django,timlinux/inasafe-django,timlinux/inasafe-django,AIFDR/inasafe-django,timlinux/inasafe-django,AIFDR/inasafe-django
|
---
+++
@@ -38,4 +38,4 @@
EMAIL_HOST_USER = 'noreply@kartoza.com'
EMAIL_HOST_PASSWORD = 'docker'
EMAIL_USE_TLS = False
-EMAIL_SUBJECT_PREFIX = '[jakarta-flood-maps]'
+EMAIL_SUBJECT_PREFIX = '[InaSAFE]'
|
2df61655cc99678e7e7db9d0cf1883c702fdc300
|
python/servo/devenv_commands.py
|
python/servo/devenv_commands.py
|
from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cargo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
|
from __future__ import print_function, unicode_literals
import json
import os
import os.path as path
import shutil
import subprocess
import sys
import tarfile
from time import time
import urllib
from mach.registrar import Registrar
from mach.decorators import (
CommandArgument,
CommandProvider,
Command,
)
from servo.command_base import CommandBase
@CommandProvider
class MachCommands(CommandBase):
@Command('cargo',
description='Run Cargo',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to Cargo")
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
@Command('rustc',
description='Run the Rust compiler',
category='devenv',
allow_all_args=True)
@CommandArgument('params', default=None, nargs='...',
help="Command-line arguments to be passed through to rustc")
def run(self, params):
return subprocess.call(["rustc"] + params, env=self.build_env())
|
Add a `mach rustc` command
|
Add a `mach rustc` command
|
Python
|
mpl-2.0
|
youprofit/servo,indykish/servo,evilpie/servo,samfoo/servo,hyowon/servo,Shraddha512/servo,dhananjay92/servo,g-k/servo,aweinstock314/servo,steveklabnik/servo,pyfisch/servo,DominoTree/servo,nerith/servo,WriterOfAlicrow/servo,saratang/servo,CJ8664/servo,nick-thompson/servo,huonw/servo,SimonSapin/servo,tschneidereit/servo,SimonSapin/servo,fiji-flo/servo,runarberg/servo,zentner-kyle/servo,rixrix/servo,avadacatavra/servo,pgonda/servo,dmarcos/servo,nrc/servo,CJ8664/servo,A-deLuna/servo,kindersung/servo,akosel/servo,rnestler/servo,pgonda/servo,rnestler/servo,larsbergstrom/servo,KiChjang/servo,jlegendary/servo,mdibaiee/servo,rentongzhang/servo,thiagopnts/servo,aweinstock314/servo,steveklabnik/servo,tempbottle/servo,srbhklkrn/SERVOENGINE,wartman4404/servo,peterjoel/servo,ryancanhelpyou/servo,tempbottle/servo,emilio/servo,caldwell/servo,pyecs/servo,mdibaiee/servo,nick-thompson/servo,rnestler/servo,wpgallih/servo,bfrohs/servo,dmarcos/servo,saneyuki/servo,tafia/servo,ConnorGBrewster/servo,nick-thompson/servo,aidanhs/servo,nrc/servo,luniv/servo,youprofit/servo,ryancanhelpyou/servo,szeged/servo,akosel/servo,sadmansk/servo,nerith/servo,larsbergstrom/servo,dagnir/servo,nnethercote/servo,WriterOfAlicrow/servo,boghison/servo,aweinstock314/servo,thiagopnts/servo,vks/servo,kindersung/servo,dvberkel/servo,jdramani/servo,aidanhs/servo,wartman4404/servo,g-k/servo,anthgur/servo,fiji-flo/servo,cbrewster/servo,zhangjunlei26/servo,jimberlage/servo,akosel/servo,splav/servo,juzer10/servo,s142857/servo,dati91/servo,Adenilson/prototype-viewing-distance,meh/servo,Adenilson/prototype-viewing-distance,CJ8664/servo,eddyb/servo,hyowon/servo,j3parker/servo,paulrouget/servo,emilio/servo,jdramani/servo,brendandahl/servo,splav/servo,avadacatavra/servo,j3parker/servo,rnestler/servo,brendandahl/servo,dmarcos/servo,sadmansk/servo,g-k/servo,rnestler/servo,aweinstock314/servo,avadacatavra/servo,larsbergstrom/servo,indykish/servo,zhangjunlei26/servo,youprofit/servo,peterjoel/servo,chotchki/servo,Adenilson/prototy
pe-viewing-distance,luniv/servo,rentongzhang/servo,deokjinkim/servo,rnestler/servo,luniv/servo,nnethercote/servo,notriddle/servo,bjwbell/servo,karlito40/servo,tempbottle/servo,saneyuki/servo,sadmansk/servo,CJ8664/servo,runarberg/servo,tschneidereit/servo,brendandahl/servo,deokjinkim/servo,ryancanhelpyou/servo,GreenRecycleBin/servo,nick-thompson/servo,g-k/servo,eddyb/servo,larsbergstrom/servo,youprofit/servo,juzer10/servo,pyecs/servo,caldwell/servo,emilio/servo,froydnj/servo,wpgallih/servo,michaelwu/servo,brendandahl/servo,juzer10/servo,jimberlage/servo,indykish/servo,bfrohs/servo,saneyuki/servo,rixrix/servo,mt2d2/servo,anthgur/servo,mbrubeck/servo,canaltinova/servo,upsuper/servo,srbhklkrn/SERVOENGINE,g-k/servo,nerith/servo,dvberkel/servo,j3parker/servo,Shraddha512/servo,wartman4404/servo,fiji-flo/servo,deokjinkim/servo,mdibaiee/servo,tafia/servo,nnethercote/servo,canaltinova/servo,wpgallih/servo,Adenilson/prototype-viewing-distance,AnthonyBroadCrawford/servo,hyowon/servo,avadacatavra/servo,karlito40/servo,peterjoel/servo,bjwbell/servo,CJ8664/servo,huonw/servo,zhangjunlei26/servo,dhananjay92/servo,AnthonyBroadCrawford/servo,boghison/servo,dsandeephegde/servo,larsbergstrom/servo,shrenikgala/servo,aidanhs/servo,youprofit/servo,echochamber/servo,vks/servo,thiagopnts/servo,ryancanhelpyou/servo,dati91/servo,srbhklkrn/SERVOENGINE,pyfisch/servo,jimberlage/servo,splav/servo,nnethercote/servo,tempbottle/servo,jlegendary/servo,dsandeephegde/servo,ruud-v-a/servo,AnthonyBroadCrawford/servo,luniv/servo,GreenRecycleBin/servo,cbrewster/servo,pyfisch/servo,bfrohs/servo,saneyuki/servo,nick-thompson/servo,ruud-v-a/servo,hyowon/servo,pyfisch/servo,akosel/servo,ryancanhelpyou/servo,kindersung/servo,zentner-kyle/servo,tschneidereit/servo,canaltinova/servo,samfoo/servo,notriddle/servo,splav/servo,saratang/servo,aidanhs/servo,ryancanhelpyou/servo,saneyuki/servo,nick-thompson/servo,notriddle/servo,mattnenterprise/servo,dvberkel/servo,szeged/servo,dmarcos/servo,jimberlage/servo,SimonSapin/se
rvo,WriterOfAlicrow/servo,mt2d2/servo,aidanhs/servo,runarberg/servo,hyowon/servo,GyrosOfWar/servo,jgraham/servo,dati91/servo,samfoo/servo,KiChjang/servo,youprofit/servo,kindersung/servo,nerith/servo,GreenRecycleBin/servo,emilio/servo,deokjinkim/servo,paulrouget/servo,dhananjay92/servo,nnethercote/servo,emilio/servo,j3parker/servo,pyfisch/servo,jimberlage/servo,caldwell/servo,DominoTree/servo,notriddle/servo,mattnenterprise/servo,snf/servo,cbrewster/servo,mdibaiee/servo,mattnenterprise/servo,dvberkel/servo,anthgur/servo,tempbottle/servo,pgonda/servo,deokjinkim/servo,szeged/servo,wartman4404/servo,A-deLuna/servo,rixrix/servo,meh/servo,jimberlage/servo,Adenilson/prototype-viewing-distance,snf/servo,dsandeephegde/servo,ConnorGBrewster/servo,karlito40/servo,chotchki/servo,dati91/servo,pyecs/servo,zhangjunlei26/servo,mattnenterprise/servo,fiji-flo/servo,notriddle/servo,aweinstock314/servo,emilio/servo,rixrix/servo,jlegendary/servo,jlegendary/servo,nrc/servo,s142857/servo,cbrewster/servo,zhangjunlei26/servo,indykish/servo,j3parker/servo,akosel/servo,DominoTree/servo,AnthonyBroadCrawford/servo,kindersung/servo,akosel/servo,CJ8664/servo,dvberkel/servo,mattnenterprise/servo,tschneidereit/servo,caldwell/servo,AnthonyBroadCrawford/servo,brendandahl/servo,srbhklkrn/SERVOENGINE,anthgur/servo,jimberlage/servo,boghison/servo,wartman4404/servo,rnestler/servo,srbhklkrn/SERVOENGINE,SimonSapin/servo,rixrix/servo,aweinstock314/servo,srbhklkrn/SERVOENGINE,jdramani/servo,walac/servo,sadmansk/servo,thiagopnts/servo,KiChjang/servo,dmarcos/servo,dmarcos/servo,hyowon/servo,avadacatavra/servo,szeged/servo,mbrubeck/servo,pgonda/servo,mt2d2/servo,saratang/servo,juzer10/servo,szeged/servo,nrc/servo,michaelwu/servo,codemac/servo,shrenikgala/servo,rentongzhang/servo,mbrubeck/servo,eddyb/servo,shrenikgala/servo,ryancanhelpyou/servo,mattnenterprise/servo,larsbergstrom/servo,cbrewster/servo,tempbottle/servo,WriterOfAlicrow/servo,tafia/servo,wpgallih/servo,walac/servo,CJ8664/servo,snf/servo,paulrouget/
servo,snf/servo,evilpie/servo,eddyb/servo,codemac/servo,ConnorGBrewster/servo,bjwbell/servo,codemac/servo,fiji-flo/servo,runarberg/servo,shrenikgala/servo,rentongzhang/servo,srbhklkrn/SERVOENGINE,akosel/servo,DominoTree/servo,jlegendary/servo,mt2d2/servo,nrc/servo,kindersung/servo,GyrosOfWar/servo,mukilan/servo,dati91/servo,bjwbell/servo,fiji-flo/servo,kindersung/servo,fiji-flo/servo,j3parker/servo,Adenilson/prototype-viewing-distance,dvberkel/servo,canaltinova/servo,evilpie/servo,RenaudParis/servo,runarberg/servo,indykish/servo,pyecs/servo,brendandahl/servo,caldwell/servo,rnestler/servo,codemac/servo,tschneidereit/servo,notriddle/servo,evilpie/servo,meh/servo,nrc/servo,evilpie/servo,RenaudParis/servo,mt2d2/servo,splav/servo,A-deLuna/servo,GreenRecycleBin/servo,emilio/servo,michaelwu/servo,mukilan/servo,sadmansk/servo,dagnir/servo,larsbergstrom/servo,nerith/servo,saneyuki/servo,dagnir/servo,wpgallih/servo,echochamber/servo,caldwell/servo,bfrohs/servo,KiChjang/servo,luniv/servo,steveklabnik/servo,michaelwu/servo,hyowon/servo,luniv/servo,jgraham/servo,vks/servo,juzer10/servo,shrenikgala/servo,zhangjunlei26/servo,indykish/servo,A-deLuna/servo,pyfisch/servo,peterjoel/servo,walac/servo,mukilan/servo,karlito40/servo,walac/servo,RenaudParis/servo,nrc/servo,mdibaiee/servo,codemac/servo,huonw/servo,upsuper/servo,paulrouget/servo,froydnj/servo,meh/servo,vks/servo,caldwell/servo,zentner-kyle/servo,avadacatavra/servo,DominoTree/servo,anthgur/servo,zentner-kyle/servo,rentongzhang/servo,jdramani/servo,dvberkel/servo,emilio/servo,paulrouget/servo,mbrubeck/servo,mdibaiee/servo,jgraham/servo,sadmansk/servo,RenaudParis/servo,AnthonyBroadCrawford/servo,saneyuki/servo,GreenRecycleBin/servo,larsbergstrom/servo,nnethercote/servo,jgraham/servo,echochamber/servo,mdibaiee/servo,chotchki/servo,saneyuki/servo,nick-thompson/servo,wartman4404/servo,walac/servo,wpgallih/servo,thiagopnts/servo,WriterOfAlicrow/servo,boghison/servo,canaltinova/servo,wpgallih/servo,evilpie/servo,walac/servo,dsandeep
hegde/servo,SimonSapin/servo,echochamber/servo,ConnorGBrewster/servo,zhangjunlei26/servo,mukilan/servo,g-k/servo,steveklabnik/servo,saratang/servo,g-k/servo,dati91/servo,emilio/servo,tafia/servo,GyrosOfWar/servo,SimonSapin/servo,karlito40/servo,huonw/servo,thiagopnts/servo,upsuper/servo,dsandeephegde/servo,bfrohs/servo,sadmansk/servo,wpgallih/servo,pyecs/servo,dagnir/servo,bjwbell/servo,splav/servo,canaltinova/servo,cbrewster/servo,bjwbell/servo,larsbergstrom/servo,nerith/servo,Shraddha512/servo,brendandahl/servo,DominoTree/servo,dsandeephegde/servo,szeged/servo,upsuper/servo,mbrubeck/servo,jgraham/servo,dhananjay92/servo,KiChjang/servo,anthgur/servo,snf/servo,peterjoel/servo,karlito40/servo,zentner-kyle/servo,pyecs/servo,jdramani/servo,aidanhs/servo,dhananjay92/servo,saratang/servo,KiChjang/servo,karlito40/servo,paulrouget/servo,paulrouget/servo,ruud-v-a/servo,froydnj/servo,ConnorGBrewster/servo,rentongzhang/servo,jimberlage/servo,indykish/servo,pyecs/servo,peterjoel/servo,A-deLuna/servo,vks/servo,pyfisch/servo,upsuper/servo,Shraddha512/servo,szeged/servo,wpgallih/servo,Shraddha512/servo,evilpie/servo,mbrubeck/servo,peterjoel/servo,zentner-kyle/servo,GreenRecycleBin/servo,meh/servo,aweinstock314/servo,mbrubeck/servo,tschneidereit/servo,shrenikgala/servo,chotchki/servo,luniv/servo,eddyb/servo,fiji-flo/servo,runarberg/servo,pgonda/servo,evilpie/servo,GyrosOfWar/servo,splav/servo,echochamber/servo,notriddle/servo,paulrouget/servo,steveklabnik/servo,saneyuki/servo,GreenRecycleBin/servo,canaltinova/servo,mattnenterprise/servo,dmarcos/servo,DominoTree/servo,mt2d2/servo,RenaudParis/servo,froydnj/servo,chotchki/servo,meh/servo,peterjoel/servo,szeged/servo,SimonSapin/servo,canaltinova/servo,sadmansk/servo,KiChjang/servo,samfoo/servo,ruud-v-a/servo,tafia/servo,rixrix/servo,zhangjunlei26/servo,RenaudParis/servo,saneyuki/servo,michaelwu/servo,upsuper/servo,zentner-kyle/servo,DominoTree/servo,rixrix/servo,mattnenterprise/servo,tafia/servo,SimonSapin/servo,jdramani/servo,GyrosOf
War/servo,bfrohs/servo,steveklabnik/servo,boghison/servo,jgraham/servo,ruud-v-a/servo,huonw/servo,cbrewster/servo,DominoTree/servo,RenaudParis/servo,avadacatavra/servo,emilio/servo,paulrouget/servo,notriddle/servo,dhananjay92/servo,paulrouget/servo,pgonda/servo,tschneidereit/servo,s142857/servo,GyrosOfWar/servo,A-deLuna/servo,jgraham/servo,deokjinkim/servo,peterjoel/servo,dagnir/servo,dsandeephegde/servo,GreenRecycleBin/servo,s142857/servo,KiChjang/servo,saratang/servo,dagnir/servo,DominoTree/servo,vks/servo,eddyb/servo,GreenRecycleBin/servo,dati91/servo,upsuper/servo,juzer10/servo,notriddle/servo,dati91/servo,wartman4404/servo,samfoo/servo,Adenilson/prototype-viewing-distance,rixrix/servo,Shraddha512/servo,A-deLuna/servo,codemac/servo,bjwbell/servo,nnethercote/servo,froydnj/servo,s142857/servo,nnethercote/servo,larsbergstrom/servo,aidanhs/servo,meh/servo,jimberlage/servo,szeged/servo,ConnorGBrewster/servo,upsuper/servo,ruud-v-a/servo,jlegendary/servo,huonw/servo,KiChjang/servo,avadacatavra/servo,dagnir/servo,ConnorGBrewster/servo,cbrewster/servo,eddyb/servo,mt2d2/servo,splav/servo,deokjinkim/servo,boghison/servo,Shraddha512/servo,indykish/servo,anthgur/servo,ConnorGBrewster/servo,s142857/servo,pgonda/servo,s142857/servo,nnethercote/servo,walac/servo,thiagopnts/servo,juzer10/servo,mbrubeck/servo,thiagopnts/servo,jdramani/servo,codemac/servo,WriterOfAlicrow/servo,jlegendary/servo,peterjoel/servo,echochamber/servo,szeged/servo,mukilan/servo,steveklabnik/servo,splav/servo,pyfisch/servo,indykish/servo,youprofit/servo,dsandeephegde/servo,nerith/servo,WriterOfAlicrow/servo,notriddle/servo,CJ8664/servo,j3parker/servo,saratang/servo,tafia/servo,pyfisch/servo,GyrosOfWar/servo,snf/servo,echochamber/servo,boghison/servo,evilpie/servo,samfoo/servo,rixrix/servo,snf/servo,anthgur/servo,chotchki/servo,mukilan/servo,AnthonyBroadCrawford/servo,vks/servo,splav/servo,nnethercote/servo,froydnj/servo,zhangjunlei26/servo,samfoo/servo,KiChjang/servo,pyfisch/servo,michaelwu/servo,dhananjay
92/servo,tempbottle/servo,runarberg/servo,eddyb/servo,huonw/servo,shrenikgala/servo,rentongzhang/servo
|
---
+++
@@ -30,3 +30,12 @@
def run(self, params):
return subprocess.call(["cargo"] + params,
env=self.build_env())
+
+ @Command('rustc',
+ description='Run the Rust compiler',
+ category='devenv',
+ allow_all_args=True)
+ @CommandArgument('params', default=None, nargs='...',
+ help="Command-line arguments to be passed through to rustc")
+ def run(self, params):
+ return subprocess.call(["rustc"] + params, env=self.build_env())
|
2b2a1848b398e59818ea7d3aa51bf7db6669917c
|
pytus2000/datadicts/__init__.py
|
pytus2000/datadicts/__init__.py
|
"""This subpackage contains all data dictionaries."""
# The Python source code gets auto-generated and this package is intentially empty.
from enum import Enum
class OrderedEnum(Enum):
"""An Enum whose members are ordered by their value."""
def __ge__(self, other):
if self.__class__ is other.__class__:
return self.value >= other.value
return NotImplemented
def __gt__(self, other):
if self.__class__ is other.__class__:
return self.value > other.value
return NotImplemented
def __le__(self, other):
if self.__class__ is other.__class__:
return self.value <= other.value
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
return self.value < other.value
return NotImplemented
class VariableEnum(OrderedEnum):
"""Contains all variables in a datadict.
Parameters:
* position: the position in the datadict (int)
* label: the string describing the variable
"""
def __init__(self, position, label):
self.position = position
self.label = label
|
"""This subpackage contains all data dictionaries."""
# The Python source code in this package other than this file has been auto-generated.
from enum import Enum
class OrderedEnum(Enum):
"""An Enum whose members are ordered by their value."""
def __ge__(self, other):
if self.__class__ is other.__class__:
return self.value >= other.value
return NotImplemented
def __gt__(self, other):
if self.__class__ is other.__class__:
return self.value > other.value
return NotImplemented
def __le__(self, other):
if self.__class__ is other.__class__:
return self.value <= other.value
return NotImplemented
def __lt__(self, other):
if self.__class__ is other.__class__:
return self.value < other.value
return NotImplemented
class VariableEnum(OrderedEnum):
"""Contains all variables in a datadict.
Parameters:
* position: the position in the datadict (int)
* label: the string describing the variable
"""
def __init__(self, position, label):
self.position = position
self.label = label
|
Update comment for auto generated files
|
Update comment for auto generated files
|
Python
|
mit
|
timtroendle/pytus2000
|
---
+++
@@ -1,5 +1,5 @@
"""This subpackage contains all data dictionaries."""
-# The Python source code gets auto-generated and this package is intentially empty.
+# The Python source code in this package other than this file has been auto-generated.
from enum import Enum
|
70d0b672eb9f857c052028be12ccae56ed798c8a
|
main/_config.py
|
main/_config.py
|
import os
import inspect
# Flask
DEBUG = True
# Amazon S3 Settings
AWS_KEY = ''
AWS_SECRET_KEY = ''
AWS_BUCKET = 'www.vpr.net'
AWS_DIRECTORY = 'sandbox/members/'
NPR_API_KEY = ''
GOOGLE_SPREADSHEET = {'USER': '',
'PASSWORD': '',
'SOURCE': ''}
# Cache Settings (units in seconds)
STATIC_EXPIRES = 60 * 24 * 3600
HTML_EXPIRES = 3600
# Frozen Flask
FREEZER_DEFAULT_MIMETYPE = 'text/html'
FREEZER_IGNORE_MIMETYPE_WARNINGS = True
FREEZER_DESTINATION = 'build'
FREEZER_BASE_URL = 'http://%s/%s' % (AWS_BUCKET, AWS_DIRECTORY)
FREEZER_STATIC_IGNORE = ['Gruntfile*', 'node_modules', 'package.json',
'dev', '.sass-cache']
WEBFACTION_PATH = AWS_DIRECTORY
ABSOLUTE_PATH = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/'
|
import os
import inspect
# Flask
DEBUG = True
# Amazon S3 Settings
AWS_KEY = ''
AWS_SECRET_KEY = ''
AWS_BUCKET = 'www.vpr.net'
AWS_DIRECTORY = 'sandbox/members'
NPR_API_KEY = ''
GOOGLE_SPREADSHEET = {'USER': '',
'PASSWORD': '',
'SOURCE': ''}
# Cache Settings (units in seconds)
STATIC_EXPIRES = 60 * 24 * 3600
HTML_EXPIRES = 3600
# Frozen Flask
FREEZER_DEFAULT_MIMETYPE = 'text/html'
FREEZER_IGNORE_MIMETYPE_WARNINGS = True
FREEZER_DESTINATION = 'build'
FREEZER_BASE_URL = 'http://%s/%s' % (AWS_BUCKET, AWS_DIRECTORY)
FREEZER_STATIC_IGNORE = ['Gruntfile*', 'node_modules', 'package.json',
'dev', '.sass-cache']
WEBFACTION_PATH = AWS_DIRECTORY
ABSOLUTE_PATH = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe()))) + '/'
|
Remove slash from sandbox path
|
Remove slash from sandbox path
|
Python
|
apache-2.0
|
vprnet/pledge-drive,vprnet/pledge-drive,vprnet/pledge-drive
|
---
+++
@@ -8,7 +8,7 @@
AWS_KEY = ''
AWS_SECRET_KEY = ''
AWS_BUCKET = 'www.vpr.net'
-AWS_DIRECTORY = 'sandbox/members/'
+AWS_DIRECTORY = 'sandbox/members'
NPR_API_KEY = ''
|
51d4e250ca8105430bb881338d4e6174c7e0d86b
|
featureExtractors/Gait.py
|
featureExtractors/Gait.py
|
import numpy as np
import os
from FeatureExtractorAbstract import FeatureExtractorAbstract
from helpers.config import PathConfig
class Gait(FeatureExtractorAbstract):
def getCSVheader(self):
return ["gaitPeriod"]
def extract(self, experiment, type, indiv):
filepath = experiment[2] + os.path.sep + PathConfig.traceFolderNormal + os.path.sep + indiv[0] + ".trace"
if not os.path.isfile(filepath):
filepath = experiment[2] + os.path.sep + PathConfig.traceFoldersAlt[type] + os.path.sep + indiv[0] + ".trace"
if not os.path.isfile(filepath):
return ['NA']
with open(filepath) as fh:
zs = []
for line in fh:
line = line.strip().split()
zs.append(line[-1])
z_fft = np.fft.rfft(zs).real
z_fft = z_fft[range(len(z_fft)/2+1)]
period = np.argmax(z_fft[1:]) + 1
return [period]
|
import numpy as np
import os
from FeatureExtractorAbstract import FeatureExtractorAbstract
from helpers.config import PathConfig
class Gait(FeatureExtractorAbstract):
def getCSVheader(self):
return ["gaitPeriod"]
# isValidLine and sameAsFloat have been copied from distanceCalc.py
def isValidLine(self, lineSplit):
return len(lineSplit) == 5 and self.sameAsFloat(lineSplit[2]) and self.sameAsFloat(lineSplit[3])
@staticmethod
def sameAsFloat(input):
try:
floatInput = float(input)
return str(floatInput) == str(input)
except ValueError:
return False
def extract(self, experiment, type, indiv):
filepath = experiment[2] + os.path.sep + PathConfig.traceFolderNormal + os.path.sep + indiv[0] + ".trace"
if not os.path.isfile(filepath):
filepath = experiment[2] + os.path.sep + PathConfig.traceFoldersAlt[type] + os.path.sep + indiv[0] + ".trace"
if not os.path.isfile(filepath):
return ['NA']
with open(filepath) as fh:
zs = []
for line in fh:
lineSplit = line.split("\t")
if not self.isValidLine(lineSplit):
lineSplit = line.split(" ")
if not self.isValidLine(lineSplit):
continue
zs.append(lineSplit[-1])
z_fft = np.fft.rfft(zs).real
z_fft = z_fft[range(len(z_fft)/2+1)]
period = np.argmax(z_fft[1:]) + 1
return [period]
|
Fix for faulty trace files
|
Fix for faulty trace files
|
Python
|
apache-2.0
|
metamarkovic/dataCollector,metamarkovic/dataCollector,fgolemo/dataCollector,fgolemo/dataCollector
|
---
+++
@@ -7,7 +7,19 @@
def getCSVheader(self):
return ["gaitPeriod"]
+
+ # isValidLine and sameAsFloat have been copied from distanceCalc.py
+ def isValidLine(self, lineSplit):
+ return len(lineSplit) == 5 and self.sameAsFloat(lineSplit[2]) and self.sameAsFloat(lineSplit[3])
+
+ @staticmethod
+ def sameAsFloat(input):
+ try:
+ floatInput = float(input)
+ return str(floatInput) == str(input)
+ except ValueError:
+ return False
def extract(self, experiment, type, indiv):
filepath = experiment[2] + os.path.sep + PathConfig.traceFolderNormal + os.path.sep + indiv[0] + ".trace"
@@ -20,9 +32,13 @@
with open(filepath) as fh:
zs = []
for line in fh:
- line = line.strip().split()
+ lineSplit = line.split("\t")
+ if not self.isValidLine(lineSplit):
+ lineSplit = line.split(" ")
+ if not self.isValidLine(lineSplit):
+ continue
- zs.append(line[-1])
+ zs.append(lineSplit[-1])
z_fft = np.fft.rfft(zs).real
z_fft = z_fft[range(len(z_fft)/2+1)]
|
3e42d128cd3139a9e35fec45b6ed3785557784f2
|
dv/uvm/core_ibex/scripts/build-instr-gen.py
|
dv/uvm/core_ibex/scripts/build-instr-gen.py
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import argparse
import shutil
import sys
from scripts_lib import run_one, start_riscv_dv_run_cmd
def main() -> int:
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', action='store_true')
parser.add_argument('--simulator', required=True)
parser.add_argument('--end-signature-addr', required=True)
parser.add_argument('--output', required=True)
parser.add_argument('--isa', required=True)
args = parser.parse_args()
# Delete the output directory if it existed to ensure a clear build
try:
shutil.rmtree(args.output)
except FileNotFoundError:
pass
cmd = (start_riscv_dv_run_cmd(args.verbose) +
['--co', '--steps=gen',
'--simulator', args.simulator,
'--output', args.output,
'--isa', args.isa,
'--end_signature_addr', args.end_signature_addr])
return run_one(args.verbose, cmd)
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python3
# Copyright lowRISC contributors.
# Licensed under the Apache License, Version 2.0, see LICENSE for details.
# SPDX-License-Identifier: Apache-2.0
import argparse
import os
import shutil
import sys
from scripts_lib import run_one, start_riscv_dv_run_cmd
def main() -> int:
parser = argparse.ArgumentParser()
parser.add_argument('--verbose', action='store_true')
parser.add_argument('--simulator', required=True)
parser.add_argument('--end-signature-addr', required=True)
parser.add_argument('--output', required=True)
parser.add_argument('--isa', required=True)
args = parser.parse_args()
# Delete the output directory if it existed to ensure a clean build, then
# create it. (The creation step is needed so that we can write our log file
# in the directory from the outset).
try:
shutil.rmtree(args.output)
except FileNotFoundError:
pass
os.makedirs(args.output, exist_ok=True)
cmd = (start_riscv_dv_run_cmd(args.verbose) +
['--co', '--steps=gen',
'--simulator', args.simulator,
'--output', args.output,
'--isa', args.isa,
'--end_signature_addr', args.end_signature_addr])
log_path = os.path.join(args.output, 'build.log')
return run_one(args.verbose, cmd, redirect_stdstreams=log_path)
if __name__ == '__main__':
sys.exit(main())
|
Write instr-gen build output to a log file, rather than stdout
|
Write instr-gen build output to a log file, rather than stdout
|
Python
|
apache-2.0
|
lowRISC/ibex,lowRISC/ibex,AmbiML/ibex,AmbiML/ibex,AmbiML/ibex,lowRISC/ibex,AmbiML/ibex,lowRISC/ibex
|
---
+++
@@ -5,6 +5,7 @@
# SPDX-License-Identifier: Apache-2.0
import argparse
+import os
import shutil
import sys
@@ -21,11 +22,15 @@
args = parser.parse_args()
- # Delete the output directory if it existed to ensure a clear build
+ # Delete the output directory if it existed to ensure a clean build, then
+ # create it. (The creation step is needed so that we can write our log file
+ # in the directory from the outset).
try:
shutil.rmtree(args.output)
except FileNotFoundError:
pass
+
+ os.makedirs(args.output, exist_ok=True)
cmd = (start_riscv_dv_run_cmd(args.verbose) +
['--co', '--steps=gen',
@@ -34,7 +39,8 @@
'--isa', args.isa,
'--end_signature_addr', args.end_signature_addr])
- return run_one(args.verbose, cmd)
+ log_path = os.path.join(args.output, 'build.log')
+ return run_one(args.verbose, cmd, redirect_stdstreams=log_path)
if __name__ == '__main__':
|
d5377e06ae059a9d478c3fe06652a353f1a8359c
|
address_book/address_book.py
|
address_book/address_book.py
|
from person import Person
__all__ = ['AddressBook']
class AddressBook(object):
def __init__(self):
self.persons = []
self.groups = []
def add_person(self, person):
self.persons.append(person)
def add_group(self, group):
self.groups.append(group)
def __contains__(self, item):
if isinstance(item, Person):
return item in self.persons
return False
|
from group import Group
from person import Person
__all__ = ['AddressBook']
class AddressBook(object):
def __init__(self):
self.persons = []
self.groups = []
def add_person(self, person):
self.persons.append(person)
def add_group(self, group):
self.groups.append(group)
def __contains__(self, item):
if isinstance(item, Person):
return item in self.persons
if isinstance(item, Group):
return item in self.groups
return False
|
Make it possible to check if some group is in the AddressBook or not
|
Make it possible to check if some group is in the AddressBook or not
|
Python
|
mit
|
dizpers/python-address-book-assignment
|
---
+++
@@ -1,3 +1,5 @@
+from group import Group
+
from person import Person
__all__ = ['AddressBook']
@@ -18,4 +20,6 @@
def __contains__(self, item):
if isinstance(item, Person):
return item in self.persons
+ if isinstance(item, Group):
+ return item in self.groups
return False
|
5e0e6d672f5066b9caa2a202fe785cb2cfb1edc7
|
ai_graph_color/experiment.py
|
ai_graph_color/experiment.py
|
import setup
from algorithm import LimitedAlgorithm
def iterative(algorithms, problem, iteration_func, local_limit,
global_limit=None):
algorithm_runners = map(
lambda m: LimitedAlgorithm(
m[0], problem, setup.Evaluation(), m[1]
),
algorithms
)
iteration_values = []
iterations = 0
iteration_num = 0
last_completion = None
completed_algorithms = set()
while (not (last_completion is not None and
iteration_num - last_completion > local_limit) and
not (global_limit is not None and iterations > global_limit)):
iterations = iteration_func(iteration_num)
iteration_values.append(iterations)
for index, runner in enumerate(algorithm_runners):
runner.set_limit(iterations)
if (runner.next_output() is None and
index not in completed_algorithms):
completed_algorithms.add(index)
last_completion = iteration_num
iteration_num += 1
return {
'iterations': iteration_values,
'history': [runner.output_history for runner in algorithm_runners]
}
|
import setup
from algorithm import LimitedAlgorithm
def iterative(algorithms, problem, iteration_func, local_limit,
global_limit=None):
algorithm_runners = [
LimitedAlgorithm(
algorithm, problem, setup.Evaluation(), params
)
for algorithm, params in algorithms
]
iteration_values = []
iterations = 0
iteration_num = 0
last_completion = None
completed_algorithms = set()
while (not (last_completion is not None and
iteration_num - last_completion > local_limit) and
not (global_limit is not None and iterations > global_limit) and
(len(completed_algorithms) < len(algorithm_runners))):
iterations = iteration_func(iteration_num)
iteration_values.append(iterations)
for index, runner in enumerate(algorithm_runners):
runner.set_limit(iterations)
runner.next_output()
if (runner.setup.counter.counter < iterations and
index not in completed_algorithms):
completed_algorithms.add(index)
last_completion = iteration_num
iteration_num += 1
return {
'iterations': iteration_values,
'history': [
(runner.output_history, runner.setup.counter.counter)
for runner in algorithm_runners
]
}
|
Make iterative stopping stop if all algorithms complete, use comprehension over map
|
Make iterative stopping stop if all algorithms complete, use comprehension over map
|
Python
|
mit
|
sagersmith8/ai_graph_coloring,sagersmith8/ai_graph_coloring
|
---
+++
@@ -4,12 +4,12 @@
def iterative(algorithms, problem, iteration_func, local_limit,
global_limit=None):
- algorithm_runners = map(
- lambda m: LimitedAlgorithm(
- m[0], problem, setup.Evaluation(), m[1]
- ),
- algorithms
- )
+ algorithm_runners = [
+ LimitedAlgorithm(
+ algorithm, problem, setup.Evaluation(), params
+ )
+ for algorithm, params in algorithms
+ ]
iteration_values = []
iterations = 0
@@ -19,14 +19,16 @@
while (not (last_completion is not None and
iteration_num - last_completion > local_limit) and
- not (global_limit is not None and iterations > global_limit)):
+ not (global_limit is not None and iterations > global_limit) and
+ (len(completed_algorithms) < len(algorithm_runners))):
iterations = iteration_func(iteration_num)
iteration_values.append(iterations)
for index, runner in enumerate(algorithm_runners):
runner.set_limit(iterations)
+ runner.next_output()
- if (runner.next_output() is None and
+ if (runner.setup.counter.counter < iterations and
index not in completed_algorithms):
completed_algorithms.add(index)
last_completion = iteration_num
@@ -35,5 +37,8 @@
return {
'iterations': iteration_values,
- 'history': [runner.output_history for runner in algorithm_runners]
+ 'history': [
+ (runner.output_history, runner.setup.counter.counter)
+ for runner in algorithm_runners
+ ]
}
|
250aab8f88cb7c6ef0c99a365a717035ce4f77d6
|
rsk_mind/datasource/datasource_csv.py
|
rsk_mind/datasource/datasource_csv.py
|
import csv
from datasource import Datasource
from ..dataset import Dataset
class CSVDatasource(Datasource):
def __init__(self, path, target=None):
super(CSVDatasource, self).__init__(path)
self.target = target
def read(self):
with open(self.path, 'rb') as infile:
reader = csv.reader(infile)
header = reader.next()
rows = []
for row in reader:
if self.target is not None:
index = header.index(self.target)
target = row[index]
del row[index]
row += [target]
rows.append(row)
return Dataset(header, rows)
def write(self, dataset):
with open(self.path, 'w') as outfile:
writer = csv.writer(outfile)
writer.writerow(dataset.transformed_header)
for row in dataset.transformed_rows:
writer.writerow(row)
|
import csv
from datasource import Datasource
from ..dataset import Dataset
class CSVDatasource(Datasource):
def __init__(self, path, target=None):
super(CSVDatasource, self).__init__(path)
self.target = target
def read(self):
with open(self.path, 'rb') as infile:
reader = csv.reader(infile)
header = reader.next()
rows = []
for row in reader:
if self.target is not None:
try:
index = header.index(self.target)
except:
raise Exception('Target class not found')
target = row[index]
del row[index]
row += [target]
rows.append(row)
return Dataset(header, rows)
def write(self, dataset):
with open(self.path, 'w') as outfile:
writer = csv.writer(outfile)
writer.writerow(dataset.transformed_header)
for row in dataset.transformed_rows:
writer.writerow(row)
|
Add exception when class not found
|
Add exception when class not found
|
Python
|
mit
|
rsk-mind/rsk-mind-framework
|
---
+++
@@ -17,7 +17,10 @@
rows = []
for row in reader:
if self.target is not None:
- index = header.index(self.target)
+ try:
+ index = header.index(self.target)
+ except:
+ raise Exception('Target class not found')
target = row[index]
del row[index]
row += [target]
|
c1e1c9d63d5334140aa71c025a90e9500b299307
|
functional_tests.py
|
functional_tests.py
|
from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title
|
from selenium import webdriver
import unittest
class NewVisitorTest(unittest.TestCase):
def setUp(self):
self.browser = webdriver.Firefox()
def tearDown(self):
self.browser.quit()
def test_can_start_a_list_and_retrieve_it_later(self):
# Rey has heard about a cool new online to-do app. He goes
# to check out its homepage
self.browser.get('http://localhost:8000')
# He notices the page title and header mention to-do lists
self.assertIn('To-Do', self.browser.title)
self.fail('Finish the test!')
# He is invited to enter a to-do item straight away
# He types "Make coffee" into a text box (Rey can't work when he is sleepy)
# There is still a text box inviting him to add another item.
# He enters "Go to the toilet" (Rey is very methodical)
# The page updates again, and now shows both items on his list
# Rey wonders whether the site will remember his list. Then he sees that
# the site has generated a unique URL for him
# He visits that URL - his to-do list is still there
# Satisfied, he goes back to sleep
if __name__ == '__main__':
unittest.main(warnings='ignore')
|
Update tests to be human-readable
|
Update tests to be human-readable
|
Python
|
apache-2.0
|
rocity/the-testing-goat,rocity/the-testing-goat
|
---
+++
@@ -1,6 +1,38 @@
from selenium import webdriver
+import unittest
-browser = webdriver.Firefox()
-browser.get('http://localhost:8000')
+class NewVisitorTest(unittest.TestCase):
+ def setUp(self):
+ self.browser = webdriver.Firefox()
-assert 'Django' in browser.title
+ def tearDown(self):
+ self.browser.quit()
+
+ def test_can_start_a_list_and_retrieve_it_later(self):
+
+ # Rey has heard about a cool new online to-do app. He goes
+ # to check out its homepage
+ self.browser.get('http://localhost:8000')
+
+ # He notices the page title and header mention to-do lists
+ self.assertIn('To-Do', self.browser.title)
+ self.fail('Finish the test!')
+
+ # He is invited to enter a to-do item straight away
+
+ # He types "Make coffee" into a text box (Rey can't work when he is sleepy)
+
+ # There is still a text box inviting him to add another item.
+ # He enters "Go to the toilet" (Rey is very methodical)
+
+ # The page updates again, and now shows both items on his list
+
+ # Rey wonders whether the site will remember his list. Then he sees that
+ # the site has generated a unique URL for him
+
+ # He visits that URL - his to-do list is still there
+
+ # Satisfied, he goes back to sleep
+
+if __name__ == '__main__':
+ unittest.main(warnings='ignore')
|
2bad8f41c8e64249ae3d1e0d129a41917ec73482
|
app/test_base.py
|
app/test_base.py
|
from flask.ext.testing import TestCase
import unittest
from app import create_app, db
class BaseTestCase(TestCase):
def create_app(self):
return create_app('config.TestingConfiguration')
def setUp(self):
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def login(self, username, password):
return self.client.post('/login', data=dict(
username=username,
password=password
), follow_redirects=True)
def logout(self):
return self.client.get('/logout', follow_redirects=True)
if __name__ == '__main__':
unittest.main()
|
from flask.ext.testing import TestCase
import unittest
from app import create_app, db
class BaseTestCase(TestCase):
def create_app(self):
return create_app('config.TestingConfiguration')
def setUp(self):
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def login(self, username='admin', password='changeme'):
return self.client.post('/login', data=dict(
username=username,
password=password
), follow_redirects=True)
def logout(self):
return self.client.get('/logout', follow_redirects=True)
if __name__ == '__main__':
unittest.main()
|
Add default values for login credentials in test base
|
Add default values for login credentials in test base
|
Python
|
mit
|
rtfoley/scorepy,rtfoley/scorepy,rtfoley/scorepy
|
---
+++
@@ -1,6 +1,7 @@
from flask.ext.testing import TestCase
import unittest
from app import create_app, db
+
class BaseTestCase(TestCase):
def create_app(self):
@@ -13,7 +14,7 @@
db.session.remove()
db.drop_all()
- def login(self, username, password):
+ def login(self, username='admin', password='changeme'):
return self.client.post('/login', data=dict(
username=username,
password=password
|
0f8263fb264e880bef47ce69d8b42b2bb885a2fd
|
goose/host_utils.py
|
goose/host_utils.py
|
import re
class HostUtils(object):
@classmethod
def host_selectors(self, all_selectors, host):
if host is None:
return None
if host in all_selectors:
selectors = all_selectors[host]
if type(selectors) is dict:
selectors = all_selectors[selectors['reference']]
return selectors
# for regex_string in all_selectors['regexs_references']:
# match_data = re.compile(regex_string).search(host)
# if match_data:
# reference_host = all_selectors['regexs_references'][regex_string]['reference']
# return all_selectors[reference_host]
|
import re
class HostUtils(object):
@classmethod
def host_selectors(self, all_selectors, host):
if host is None:
return None
host = host.replace("www.", "")
if host in all_selectors:
selectors = all_selectors[host]
if type(selectors) is dict:
selectors = all_selectors[selectors['reference']]
return selectors
# for regex_string in all_selectors['regexs_references']:
# match_data = re.compile(regex_string).search(host)
# if match_data:
# reference_host = all_selectors['regexs_references'][regex_string]['reference']
# return all_selectors[reference_host]
|
Add a workaround for processing rules without www
|
Add a workaround for processing rules without www
|
Python
|
apache-2.0
|
cronycle/python-goose,cronycle/python-goose,cronycle/python-goose
|
---
+++
@@ -7,6 +7,8 @@
def host_selectors(self, all_selectors, host):
if host is None:
return None
+
+ host = host.replace("www.", "")
if host in all_selectors:
selectors = all_selectors[host]
|
59bd51f4c809a7f99f186be52c6c9090b613ac42
|
tests/unit/state_test.py
|
tests/unit/state_test.py
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Nicole Thomas <nicole@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from salttesting import TestCase, skipIf
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON,
)
ensure_in_syspath('../')
# Import Salt libs
from salt import state
@skipIf(NO_MOCK, NO_MOCK_REASON)
class StateCompilerTestCase(TestCase):
'''
TestCase for the state compiler.
'''
def test_format_log_non_ascii_character(self):
'''
Tests running a non-ascii character through the state.format_log
function. See Issue #33605.
'''
# There is no return to test against as the format_log
# function doesn't return anything. However, we do want
# to make sure that the function doesn't stacktrace when
# called.
ret = {'changes': {'Français': {'old': 'something old',
'new': 'something new'}}}
state.format_log(ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(StateCompilerTestCase, needs_daemon=False)
|
# -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Nicole Thomas <nicole@saltstack.com>`
'''
# Import Python libs
from __future__ import absolute_import
# Import Salt Testing libs
from salttesting import TestCase, skipIf
from salttesting.helpers import ensure_in_syspath
from salttesting.mock import (
NO_MOCK,
NO_MOCK_REASON,
)
ensure_in_syspath('../')
# Import Salt libs
from salt import state
@skipIf(NO_MOCK, NO_MOCK_REASON)
class StateCompilerTestCase(TestCase):
'''
TestCase for the state compiler.
'''
def test_format_log_non_ascii_character(self):
'''
Tests running a non-ascii character through the state.format_log
function. See Issue #33605.
'''
# There is no return to test against as the format_log
# function doesn't return anything. However, we do want
# to make sure that the function doesn't stacktrace when
# called.
ret = {'changes': {u'Français': {'old': 'something old',
'new': 'something new'}},
'result': True}
state.format_log(ret)
if __name__ == '__main__':
from integration import run_tests
run_tests(StateCompilerTestCase, needs_daemon=False)
|
Update test to correct iteration
|
Update test to correct iteration
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
---
+++
@@ -35,8 +35,9 @@
# function doesn't return anything. However, we do want
# to make sure that the function doesn't stacktrace when
# called.
- ret = {'changes': {'Français': {'old': 'something old',
- 'new': 'something new'}}}
+ ret = {'changes': {u'Français': {'old': 'something old',
+ 'new': 'something new'}},
+ 'result': True}
state.format_log(ret)
|
70748648bc4e7b050840ed781208ea14d21a735e
|
dragoman_blog/views.py
|
dragoman_blog/views.py
|
from dragoman_blog.models import EntryTranslation
from django.views.generic.list import ListView
class ListByTagView(ListView):
""" View for listing posts by tags"""
template_name = "dragoman_blog/entrytranslation_list_by_tag.html"
model = EntryTranslation
def get_queryset(self):
try:
tag = self.kwargs['tag']
except:
tag = ''
if (tag != ''):
object_list = self.model.objects.filter(tags__name=tag)
else:
object_list = self.model.objects.none()
return object_list
def get_context_data(self, **kwargs):
context = super(ListByTagView, self).get_context_data(**kwargs)
context['tag'] = self.kwargs['tag']
return context
|
from dragoman_blog.models import EntryTranslation
from django.views.generic.list import ListView
from django.utils.translation import get_language
class ListByTagView(ListView):
""" View for listing posts by tags"""
template_name = "dragoman_blog/entrytranslation_list_by_tag.html"
model = EntryTranslation
def get_queryset(self):
try:
tag = self.kwargs['tag']
except:
tag = ''
if (tag != ''):
object_list = self.model.objects.filter(
tags__name=tag, language_code=get_language())
else:
object_list = self.model.objects.none()
return object_list
def get_context_data(self, **kwargs):
context = super(ListByTagView, self).get_context_data(**kwargs)
context['tag'] = self.kwargs['tag']
return context
|
Add filter by language for list by tag.
|
Add filter by language for list by tag.
|
Python
|
bsd-3-clause
|
fivethreeo/django-dragoman-blog
|
---
+++
@@ -1,5 +1,6 @@
from dragoman_blog.models import EntryTranslation
from django.views.generic.list import ListView
+from django.utils.translation import get_language
class ListByTagView(ListView):
@@ -14,7 +15,8 @@
except:
tag = ''
if (tag != ''):
- object_list = self.model.objects.filter(tags__name=tag)
+ object_list = self.model.objects.filter(
+ tags__name=tag, language_code=get_language())
else:
object_list = self.model.objects.none()
return object_list
|
fca6602f8bb9e7c8f7c036665c035cd58461bf06
|
catalog/serializers.py
|
catalog/serializers.py
|
from catalog.models import Course, Category
from rest_framework import serializers
from documents.serializers import ShortDocumentSerializer
import json
class CourseSerializer(serializers.HyperlinkedModelSerializer):
meta = serializers.SerializerMethodField()
def get_meta(self, course):
return json.loads(course.description)
class Meta:
model = Course
fields = ('id', 'name', 'slug', 'url', 'meta', 'categories', 'document_set')
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', )
|
from catalog.models import Course, Category
from rest_framework import serializers
from documents.serializers import ShortDocumentSerializer
import json
class CourseSerializer(serializers.HyperlinkedModelSerializer):
meta = serializers.SerializerMethodField()
def get_meta(self, course):
return json.loads(course.description)
class Meta:
model = Course
fields = (
'id', 'name', 'slug', 'url', 'meta',
'categories', 'document_set', 'thread_set'
)
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class ShortCourseSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Course
fields = ('id', 'url', 'slug', 'name', )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
}
class CategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', 'parent', 'children', 'course_set')
extra_kwargs = {
'course_set': {'lookup_field': 'slug'},
}
class ShortCategorySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Category
fields = ('id', 'url', 'slug', 'name', )
|
Add thread_set to course api
|
Add thread_set to course api
|
Python
|
agpl-3.0
|
UrLab/DocHub,UrLab/beta402,UrLab/beta402,UrLab/DocHub,UrLab/beta402,UrLab/DocHub,UrLab/DocHub
|
---
+++
@@ -12,7 +12,10 @@
class Meta:
model = Course
- fields = ('id', 'name', 'slug', 'url', 'meta', 'categories', 'document_set')
+ fields = (
+ 'id', 'name', 'slug', 'url', 'meta',
+ 'categories', 'document_set', 'thread_set'
+ )
extra_kwargs = {
'url': {'lookup_field': 'slug'}
|
2ba1139fe7994da64eb1bbcf057f7205b35a0fd5
|
doc/pool_scripts/cats.py
|
doc/pool_scripts/cats.py
|
import os
import photomosaic.flickr
import photomosaic as pm
if not os.path.isfile(os.path.expanduser('~/pools/cats/pool.json')):
FLICKR_API_KEY = os.environ['FLICKR_API_KEY']
pm.set_options(flickr_api_key=FLICKR_API_KEY)
photomosaic.flickr.from_search('cats', '~/pools/cats/')
pool = pm.make_pool('~/pools/cats/*.jpg')
pm.export_pool(pool, '~/pools/cats/pool.json') # save color analysis for future reuse
|
import os
import photomosaic.flickr
import photomosaic as pm
class MissingAPIKey(Exception):
...
if not os.path.isfile(os.path.expanduser('~/pools/cats/pool.json')):
try:
FLICKR_API_KEY = os.environ['FLICKR_API_KEY']
except KeyError:
raise MissingAPIKey(
"This script requires the environment variable FLICKR_API_KEY "
"to run. It will be not be available on pull requests from "
"other forks. See "
"https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions"
) from None
pm.set_options(flickr_api_key=FLICKR_API_KEY)
photomosaic.flickr.from_search('cats', '~/pools/cats/')
pool = pm.make_pool('~/pools/cats/*.jpg')
pm.export_pool(pool, '~/pools/cats/pool.json') # save color analysis for future reuse
else:
print("Pool was found in ~pools/cats/. No action needed.")
|
Improve error message if FLICKR_API_KEY is missing
|
Improve error message if FLICKR_API_KEY is missing
|
Python
|
bsd-3-clause
|
danielballan/photomosaic
|
---
+++
@@ -3,10 +3,24 @@
import photomosaic as pm
+class MissingAPIKey(Exception):
+ ...
+
+
if not os.path.isfile(os.path.expanduser('~/pools/cats/pool.json')):
- FLICKR_API_KEY = os.environ['FLICKR_API_KEY']
+ try:
+ FLICKR_API_KEY = os.environ['FLICKR_API_KEY']
+ except KeyError:
+ raise MissingAPIKey(
+ "This script requires the environment variable FLICKR_API_KEY "
+ "to run. It will be not be available on pull requests from "
+ "other forks. See "
+ "https://docs.travis-ci.com/user/pull-requests/#pull-requests-and-security-restrictions"
+ ) from None
pm.set_options(flickr_api_key=FLICKR_API_KEY)
photomosaic.flickr.from_search('cats', '~/pools/cats/')
pool = pm.make_pool('~/pools/cats/*.jpg')
pm.export_pool(pool, '~/pools/cats/pool.json') # save color analysis for future reuse
+else:
+ print("Pool was found in ~pools/cats/. No action needed.")
|
e74e4e1108cac7d8438e4b412e72f016323636a1
|
chatterbot/__init__.py
|
chatterbot/__init__.py
|
"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a3'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
|
"""
ChatterBot is a machine learning, conversational dialog engine.
"""
from .chatterbot import ChatBot
__version__ = '1.0.0a4'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
__all__ = (
'ChatBot',
)
|
Update release version to 1.0.0a4
|
Update release version to 1.0.0a4
|
Python
|
bsd-3-clause
|
vkosuri/ChatterBot,gunthercox/ChatterBot
|
---
+++
@@ -3,7 +3,7 @@
"""
from .chatterbot import ChatBot
-__version__ = '1.0.0a3'
+__version__ = '1.0.0a4'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/ChatterBot'
|
a1a426a49511a52f5a40ab07310c1af4197feca2
|
includes/helpers.py
|
includes/helpers.py
|
# Functions that multiple plugins should use
def time_string(tdel):
if tdel.days > 14:
return "{}w ago".format(tdel.days//7)
elif tdel.days > 1:
return "{}d ago".format(tdel.days)
elif tdel.seconds > 7200:
return "{}h ago".format((tdel.days*24)+(tdel.seconds//3600))
elif tdel.seconds > 120:
return "{}m ago".format(tdel.seconds//60)
else:
return "{}s ago".format(tdel.seconds)
|
# Functions that multiple plugins should use
def time_string(tdel):
if tdel.days > 14:
return "{}w ago".format(tdel.days//7)
elif tdel.days > 1:
return "{}d ago".format(tdel.days)
elif tdel.days == 1 or tdel.seconds > 7200:
return "{}h ago".format((tdel.days*24)+(tdel.seconds//3600))
elif tdel.seconds > 120:
return "{}m ago".format(tdel.seconds//60)
else:
return "{}s ago".format(tdel.seconds)
|
Fix for 24-48 hours being incorrectly shown as 0-24 hours.
|
Fix for 24-48 hours being incorrectly shown as 0-24 hours.
|
Python
|
mit
|
Sulter/MASTERlinker
|
---
+++
@@ -6,7 +6,7 @@
return "{}w ago".format(tdel.days//7)
elif tdel.days > 1:
return "{}d ago".format(tdel.days)
- elif tdel.seconds > 7200:
+ elif tdel.days == 1 or tdel.seconds > 7200:
return "{}h ago".format((tdel.days*24)+(tdel.seconds//3600))
elif tdel.seconds > 120:
return "{}m ago".format(tdel.seconds//60)
|
2e2a00a075c7f59375f90ee8b1416800dddd53d1
|
integration/main.py
|
integration/main.py
|
from spec import skip
def simple_command_on_host():
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts():
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command():
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands():
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote():
"""
Run command truly locally, and over SSH via "localhost"
Only really makes sense at the task level though...
"""
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
|
from spec import skip, Spec
class Main(Spec):
def simple_command_on_host(self):
"""
Run command on host "localhost"
"""
skip()
Connection('localhost').run('echo foo')
# => Result
def simple_command_on_multiple_hosts(self):
"""
Run command on localhost...twice!
"""
skip()
Batch(['localhost', 'localhost']).run('echo foo')
# => [Result, Result
def sudo_command(self):
"""
Run command via sudo on host "localhost"
"""
skip()
Connection('localhost').sudo('echo foo')
def mixed_sudo_and_normal_commands(self):
"""
Run command via sudo, and not via sudo, on "localhost"
"""
skip()
cxn = Connection('localhost')
cxn.run('whoami')
cxn.sudo('whoami')
# Alternately...
cxn.run('whoami', runner=Basic)
cxn.run('whoami', runner=Sudo)
def switch_command_between_local_and_remote(self):
"""
Run command truly locally, and over SSH via "localhost"
Only really makes sense at the task level though...
"""
skip()
# Basic/raw
run('hostname') # Or Context().run('hostname')
Connection('localhost').run('hostname')
|
Move towards real spec usage
|
Move towards real spec usage
|
Python
|
bsd-2-clause
|
fabric/fabric
|
---
+++
@@ -1,48 +1,49 @@
-from spec import skip
+from spec import skip, Spec
-def simple_command_on_host():
- """
- Run command on host "localhost"
- """
- skip()
- Connection('localhost').run('echo foo')
- # => Result
+class Main(Spec):
+ def simple_command_on_host(self):
+ """
+ Run command on host "localhost"
+ """
+ skip()
+ Connection('localhost').run('echo foo')
+ # => Result
-def simple_command_on_multiple_hosts():
- """
- Run command on localhost...twice!
- """
- skip()
- Batch(['localhost', 'localhost']).run('echo foo')
- # => [Result, Result
+ def simple_command_on_multiple_hosts(self):
+ """
+ Run command on localhost...twice!
+ """
+ skip()
+ Batch(['localhost', 'localhost']).run('echo foo')
+ # => [Result, Result
-def sudo_command():
- """
- Run command via sudo on host "localhost"
- """
- skip()
- Connection('localhost').sudo('echo foo')
+ def sudo_command(self):
+ """
+ Run command via sudo on host "localhost"
+ """
+ skip()
+ Connection('localhost').sudo('echo foo')
-def mixed_sudo_and_normal_commands():
- """
- Run command via sudo, and not via sudo, on "localhost"
- """
- skip()
- cxn = Connection('localhost')
- cxn.run('whoami')
- cxn.sudo('whoami')
- # Alternately...
- cxn.run('whoami', runner=Basic)
- cxn.run('whoami', runner=Sudo)
+ def mixed_sudo_and_normal_commands(self):
+ """
+ Run command via sudo, and not via sudo, on "localhost"
+ """
+ skip()
+ cxn = Connection('localhost')
+ cxn.run('whoami')
+ cxn.sudo('whoami')
+ # Alternately...
+ cxn.run('whoami', runner=Basic)
+ cxn.run('whoami', runner=Sudo)
-def switch_command_between_local_and_remote():
- """
- Run command truly locally, and over SSH via "localhost"
+ def switch_command_between_local_and_remote(self):
+ """
+ Run command truly locally, and over SSH via "localhost"
- Only really makes sense at the task level though...
- """
- skip()
- # Basic/raw
- run('hostname') # Or Context().run('hostname')
- Connection('localhost').run('hostname')
+ Only really makes sense at the task level though...
+ """
+ skip()
+ # Basic/raw
+ run('hostname') # Or Context().run('hostname')
+ Connection('localhost').run('hostname')
|
42f0c76664337af80d692fe7649f3643c237cc47
|
Tests/MathFunctionsTest.py
|
Tests/MathFunctionsTest.py
|
from Math.MathFunctions import *
def pointTest():
point1 = (0, 0)
point2 = (2, 4)
print("Point 1: {}".format(point1))
print("Point 2: {}".format(point2))
print("Point distance: {}".format(pointDistance(point1[0],point1[1],point2[0],point2[1])))
angle = pointAngle(point1[0],point1[1],point2[0],point2[1]);
print("Point angle: {:.3f}, {:.3f} degrees".format(angle, angle*RAD_TO_DEG))
pointTest()
|
from Math.MathFunctions import *
import unittest
class TestPointMethods(unittest.TestCase):
def test_point(self):
point1 = (0, 0)
point2 = (2, 4)
angle = pointAngle(point1[0], point1[1], point2[0], point2[1])
dist = pointDistance(point1[0], point1[1], point2[0], point2[1])
self.assertAlmostEqual(angle, 1.1071487177940904)
self.assertAlmostEqual(dist, 4.47213595499958)
class TestHelperMethods(unittest.TestCase):
def test_clamp(self):
self.assertEqual(clamp(10, 1, 5), 5)
self.assertEqual(clamp(0, 1, 5), 1)
self.assertEqual(clamp(3, 1, 5), 3)
self.assertEqual(clamp(5, 1, 5), 5)
if __name__ == '__main__':
unittest.main()
|
Use python's unit testing framework
|
Use python's unit testing framework
|
Python
|
mit
|
turtles/PythonScripts
|
---
+++
@@ -1,13 +1,23 @@
from Math.MathFunctions import *
+import unittest
-def pointTest():
- point1 = (0, 0)
- point2 = (2, 4)
+class TestPointMethods(unittest.TestCase):
+ def test_point(self):
+ point1 = (0, 0)
+ point2 = (2, 4)
- print("Point 1: {}".format(point1))
- print("Point 2: {}".format(point2))
- print("Point distance: {}".format(pointDistance(point1[0],point1[1],point2[0],point2[1])))
- angle = pointAngle(point1[0],point1[1],point2[0],point2[1]);
- print("Point angle: {:.3f}, {:.3f} degrees".format(angle, angle*RAD_TO_DEG))
+ angle = pointAngle(point1[0], point1[1], point2[0], point2[1])
+ dist = pointDistance(point1[0], point1[1], point2[0], point2[1])
-pointTest()
+ self.assertAlmostEqual(angle, 1.1071487177940904)
+ self.assertAlmostEqual(dist, 4.47213595499958)
+
+class TestHelperMethods(unittest.TestCase):
+ def test_clamp(self):
+ self.assertEqual(clamp(10, 1, 5), 5)
+ self.assertEqual(clamp(0, 1, 5), 1)
+ self.assertEqual(clamp(3, 1, 5), 3)
+ self.assertEqual(clamp(5, 1, 5), 5)
+
+if __name__ == '__main__':
+ unittest.main()
|
66bc45bb5cd8808bd65c4b796f3ca4d5564cccf8
|
cell/results.py
|
cell/results.py
|
"""cell.result"""
from __future__ import absolute_import
from __future__ import with_statement
from kombu.pools import producers
from .exceptions import CellError, NoReplyError
__all__ = ['AsyncResult']
class AsyncResult(object):
Error = CellError
NoReplyError = NoReplyError
def __init__(self, ticket, actor):
self.ticket = ticket
self.actor = actor
def _first(self, replies):
if replies is not None:
replies = list(replies)
if replies:
return replies[0]
raise self.NoReplyError('No reply received within time constraint')
def get(self, **kwargs):
return self._first(self.gather(**dict(kwargs, limit=1)))
def gather(self, propagate=True, **kwargs):
connection = self.actor.connection
gather = self._gather
with producers[connection].acquire(block=True) as producer:
for r in gather(producer.connection, producer.channel, self.ticket,
propagate=propagate, **kwargs):
yield r
def _gather(self, *args, **kwargs):
propagate = kwargs.pop('propagate', True)
return (self.to_python(reply, propagate=propagate)
for reply in self.actor._collect_replies(*args, **kwargs))
def to_python(self, reply, propagate=True):
try:
return reply['ok']
except KeyError:
error = self.Error(*reply.get('nok') or ())
if propagate:
raise error
return error
|
"""cell.result"""
from __future__ import absolute_import
from __future__ import with_statement
from kombu.pools import producers
from .exceptions import CellError, NoReplyError
__all__ = ['AsyncResult']
class AsyncResult(object):
Error = CellError
NoReplyError = NoReplyError
def __init__(self, ticket, actor):
self.ticket = ticket
self.actor = actor
self._result = None
def _first(self, replies):
if replies is not None:
replies = list(replies)
if replies:
return replies[0]
raise self.NoReplyError('No reply received within time constraint')
@property
def result(self):
if not self._result:
self._result = self.get()
return self.result
def get(self, **kwargs):
return self._first(self.gather(**dict(kwargs, limit=1)))
def gather(self, propagate=True, **kwargs):
connection = self.actor.connection
gather = self._gather
with producers[connection].acquire(block=True) as producer:
for r in gather(producer.connection, producer.channel, self.ticket,
propagate=propagate, **kwargs):
yield r
def _gather(self, *args, **kwargs):
propagate = kwargs.pop('propagate', True)
return (self.to_python(reply, propagate=propagate)
for reply in self.actor._collect_replies(*args, **kwargs))
def to_python(self, reply, propagate=True):
try:
return reply['ok']
except KeyError:
error = self.Error(*reply.get('nok') or ())
if propagate:
raise error
return error
|
Add result property to AsyncResult (it blocks if the result has not been previously retrieved, or return the result otherwise)
|
Add result property to AsyncResult
(it blocks if the result has not been previously retrieved, or return the
result otherwise)
|
Python
|
bsd-3-clause
|
celery/cell,celery/cell
|
---
+++
@@ -17,6 +17,7 @@
def __init__(self, ticket, actor):
self.ticket = ticket
self.actor = actor
+ self._result = None
def _first(self, replies):
if replies is not None:
@@ -24,7 +25,14 @@
if replies:
return replies[0]
raise self.NoReplyError('No reply received within time constraint')
-
+
+ @property
+ def result(self):
+ if not self._result:
+ self._result = self.get()
+ return self.result
+
+
def get(self, **kwargs):
return self._first(self.gather(**dict(kwargs, limit=1)))
|
20562a167e24911873e83659bccfde94b0a91061
|
do_the_tests.py
|
do_the_tests.py
|
from runtests import Tester
import os.path
tester = Tester(os.path.abspath(__file__), "fake_spectra")
tester.main(sys.argv[1:])
|
from runtests import Tester
import os.path
import sys
tester = Tester(os.path.abspath(__file__), "fake_spectra")
tester.main(sys.argv[1:])
|
Add import back to test script
|
Add import back to test script
|
Python
|
mit
|
sbird/fake_spectra,sbird/fake_spectra,sbird/fake_spectra
|
---
+++
@@ -1,5 +1,6 @@
from runtests import Tester
import os.path
+import sys
tester = Tester(os.path.abspath(__file__), "fake_spectra")
|
122b4e6982fe7a74ee668c1b146c32a61c72ec7b
|
armstrong/hatband/sites.py
|
armstrong/hatband/sites.py
|
from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
class AdminSite(DjangoAdminSite):
def __init__(self, default_site=None, *args, **kwargs):
if default_site is None:
default_site = django_site
super(AdminSite, self).__init__(*args, **kwargs)
self._registry = HatbandAndDjangoRegistry(self,
default_site=default_site)
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
|
from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = django_site._registry
|
Simplify this code and make sure AdminSite doesn't act like a singleton
|
Simplify this code and make sure AdminSite doesn't act like a singleton
Create a faux-singleton for `AdminSite` and make sure it has a copy of
all of the previously registered models. This makes
`armstrong.hatband.site` look just like `django.contrib.admin.site`.
|
Python
|
apache-2.0
|
texastribune/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband
|
---
+++
@@ -2,38 +2,7 @@
from django.contrib.admin.sites import site as django_site
-class HatbandAndDjangoRegistry(object):
- def __init__(self, site, default_site=None):
- if default_site is None:
- default_site = django_site
- super(HatbandAndDjangoRegistry, self).__init__()
- self._site = site
- self._registry = {}
- self.dicts = [self._registry, default_site._registry]
-
- def items(self):
- for d in self.dicts:
- for item in d.items():
- yield item
-
- def iteritems(self):
- return iter(self.items())
-
- def __contains__(self, k):
- for d in self.dicts:
- if k in d:
- return True
- return False
-
-
class AdminSite(DjangoAdminSite):
- def __init__(self, default_site=None, *args, **kwargs):
- if default_site is None:
- default_site = django_site
- super(AdminSite, self).__init__(*args, **kwargs)
- self._registry = HatbandAndDjangoRegistry(self,
- default_site=default_site)
-
def get_urls(self):
from django.conf.urls.defaults import patterns, url
@@ -43,3 +12,4 @@
site = AdminSite()
+site._registry = django_site._registry
|
2f373227e16124ea63586c255a19b4919f1ec150
|
functionaltests/__init__.py
|
functionaltests/__init__.py
|
"""
Copyright 2015 Rackspace
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from tempest import config
CONF = config.CONF
# Use local tempest conf if one is available.
# This usually means we're running tests outside of devstack
if os.path.exists('./etc/dev_tempest.conf'):
CONF.set_config_path('./etc/dev_tempest.conf')
|
Fix "invalid credentials" error running functional tests
|
Fix "invalid credentials" error running functional tests
This fixes the "invalid credentials" error that you will get
if you create a clean barbican environment and try to run the
functional tests. It is actually a workaround for a change
to tempest (see https://review.openstack.org/#/c/147778/) which
causes a config object to be created BEFORE we get a chance to
set the path to OUR config. When we set the path to our config
it is essentially ignored (since the config has already been
created) so the credentials in the tempest config aren't the
ones we set - hence the invalid credentials error.
This isn't an issue in devstack gate because our config
file isn't used.
This fix ensures that we will get our path update done before
tempest builds its config so our values will be used by tempest
when the real config object is created.
Change-Id: If8381aff3233934a0c733e12d64605c2dbdfe60c
|
Python
|
apache-2.0
|
cloudkeep/barbican,cneill/barbican,cloudkeep/barbican,MCDong/barbican,cneill/barbican,jmvrbanac/barbican,MCDong/barbican,openstack/barbican,openstack/barbican,jmvrbanac/barbican
|
---
+++
@@ -0,0 +1,25 @@
+"""
+Copyright 2015 Rackspace
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+"""
+import os
+
+from tempest import config
+
+CONF = config.CONF
+
+# Use local tempest conf if one is available.
+# This usually means we're running tests outside of devstack
+if os.path.exists('./etc/dev_tempest.conf'):
+ CONF.set_config_path('./etc/dev_tempest.conf')
|
|
5a102dfce418ec170a937ee50e3dbe0dac11bce9
|
pinry/settings/local_settings.example.py
|
pinry/settings/local_settings.example.py
|
import os
# Please don't change following settings unless you know what you are doing
STATIC_ROOT = '/data/static'
MEDIA_ROOT = os.path.join(STATIC_ROOT, 'media')
# SECURITY WARNING: keep the secret key used in production secret!
# Or just write your own secret-key here instead of using a env-variable
SECRET_KEY = "secret_key_place_holder"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/data/production.db',
}
}
# Allow users to register by themselves
ALLOW_NEW_REGISTRATIONS = True
# Delete image files once you remove your pin
IMAGE_AUTO_DELETE = True
# thumbnail size control
IMAGE_SIZES = {
'thumbnail': {'size': [240, 0]},
'standard': {'size': [600, 0]},
'square': {'crop': True, 'size': [125, 125]},
}
# Whether people can view pins without login
PUBLIC = True
|
import os
# Please don't change following settings unless you know what you are doing
STATIC_ROOT = '/data/static'
MEDIA_ROOT = os.path.join(STATIC_ROOT, 'media')
# SECURITY WARNING: keep the secret key used in production secret!
# Or just write your own secret-key here instead of using a env-variable
SECRET_KEY = "secret_key_place_holder"
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# SECURITY WARNING: use your actual domain name in production!
ALLOWED_HOSTS = ['*']
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': '/data/production.db',
}
}
# Allow users to register by themselves
ALLOW_NEW_REGISTRATIONS = True
# Delete image files once you remove your pin
IMAGE_AUTO_DELETE = True
# thumbnail size control
IMAGE_SIZES = {
'thumbnail': {'size': [240, 0]},
'standard': {'size': [600, 0]},
'square': {'crop': True, 'size': [125, 125]},
}
# Whether people can view pins without login
PUBLIC = True
ENABLED_PLUGINS = [
'pinry_plugins.batteries.plugin_example.Plugin',
]
|
Add Pinry plugin config to local_settings.py
|
Feature: Add Pinry plugin config to local_settings.py
|
Python
|
bsd-2-clause
|
pinry/pinry,lapo-luchini/pinry,pinry/pinry,lapo-luchini/pinry,pinry/pinry,pinry/pinry,lapo-luchini/pinry,lapo-luchini/pinry
|
---
+++
@@ -42,3 +42,7 @@
# Whether people can view pins without login
PUBLIC = True
+
+ENABLED_PLUGINS = [
+ 'pinry_plugins.batteries.plugin_example.Plugin',
+]
|
ced02ae257246e700caa0da075d86becccc3b5c9
|
jarn/viewdoc/colors.py
|
jarn/viewdoc/colors.py
|
import os
import functools
import blessed
def color(func):
functools.wraps(func)
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
return func(string)
return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
import os
import functools
import blessed
def color(func):
assignments = functools.WRAPPER_ASSIGNMENTS
if not hasattr(func, '__name__'):
assignments = [x for x in assignments if x != '__name__']
@functools.wraps(func, assignments)
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
return func(string)
return wrapper
term = blessed.Terminal()
bold = color(term.bold)
blue = color(term.bold_blue)
green = color(term.bold_green)
red = color(term.bold_red)
|
Fix wrapping in color decorator.
|
Fix wrapping in color decorator.
|
Python
|
bsd-2-clause
|
Jarn/jarn.viewdoc
|
---
+++
@@ -4,7 +4,11 @@
def color(func):
- functools.wraps(func)
+ assignments = functools.WRAPPER_ASSIGNMENTS
+ if not hasattr(func, '__name__'):
+ assignments = [x for x in assignments if x != '__name__']
+
+ @functools.wraps(func, assignments)
def wrapper(string):
if os.environ.get('JARN_NO_COLOR') == '1':
return string
|
8fadb2bb766bd3a18e7920a5dbf23669796330ff
|
src/mcedit2/rendering/scenegraph/bind_texture.py
|
src/mcedit2/rendering/scenegraph/bind_texture.py
|
"""
bind_texture
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from OpenGL import GL
from mcedit2.rendering.scenegraph import rendernode
from mcedit2.rendering.scenegraph.rendernode import RenderstateRenderNode
from mcedit2.rendering.scenegraph.scenenode import Node
from mcedit2.util import glutils
log = logging.getLogger(__name__)
class BindTextureRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_ENABLE_BIT | GL.GL_TEXTURE_BIT)
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPushMatrix()
GL.glLoadIdentity()
scale = self.sceneNode.scale
if scale is not None:
GL.glScale(*scale)
glutils.glActiveTexture(GL.GL_TEXTURE0)
GL.glEnable(GL.GL_TEXTURE_2D)
if self.sceneNode.texture is not None:
self.sceneNode.texture.bind()
def exit(self):
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPopMatrix()
GL.glPopAttrib()
class BindTextureNode(Node):
RenderNodeClass = BindTextureRenderNode
def __init__(self, texture, scale=None):
"""
:type texture: glutils.Texture
"""
super(BindTextureNode, self).__init__()
self.texture = texture
self.scale = scale
|
"""
bind_texture
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from OpenGL import GL
from mcedit2.rendering.scenegraph import rendernode
from mcedit2.rendering.scenegraph.rendernode import RenderstateRenderNode
from mcedit2.rendering.scenegraph.scenenode import Node
from mcedit2.util import glutils
log = logging.getLogger(__name__)
class BindTextureRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_ENABLE_BIT | GL.GL_TEXTURE_BIT)
scale = self.sceneNode.scale
if scale is not None:
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPushMatrix()
GL.glLoadIdentity()
GL.glScale(*scale)
glutils.glActiveTexture(GL.GL_TEXTURE0) # disable texture1?
GL.glEnable(GL.GL_TEXTURE_2D)
if self.sceneNode.texture is not None:
self.sceneNode.texture.bind()
def exit(self):
if self.sceneNode.scale is not None:
# Please do not change BindTextureNode.scale during RenderNode calls, thx
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPopMatrix()
GL.glPopAttrib()
class BindTextureNode(Node):
RenderNodeClass = BindTextureRenderNode
def __init__(self, texture, scale=None):
"""
:type texture: glutils.Texture
"""
super(BindTextureNode, self).__init__()
self.texture = texture
self.scale = scale
|
Change BindTextureRenderNode to make fewer GL calls when the texture scale is None.
|
Change BindTextureRenderNode to make fewer GL calls when the texture scale is None.
|
Python
|
bsd-3-clause
|
vorburger/mcedit2,vorburger/mcedit2
|
---
+++
@@ -15,20 +15,22 @@
class BindTextureRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_ENABLE_BIT | GL.GL_TEXTURE_BIT)
- GL.glMatrixMode(GL.GL_TEXTURE)
- GL.glPushMatrix()
- GL.glLoadIdentity()
scale = self.sceneNode.scale
if scale is not None:
+ GL.glMatrixMode(GL.GL_TEXTURE)
+ GL.glPushMatrix()
+ GL.glLoadIdentity()
GL.glScale(*scale)
- glutils.glActiveTexture(GL.GL_TEXTURE0)
+ glutils.glActiveTexture(GL.GL_TEXTURE0) # disable texture1?
GL.glEnable(GL.GL_TEXTURE_2D)
if self.sceneNode.texture is not None:
self.sceneNode.texture.bind()
def exit(self):
- GL.glMatrixMode(GL.GL_TEXTURE)
- GL.glPopMatrix()
+ if self.sceneNode.scale is not None:
+ # Please do not change BindTextureNode.scale during RenderNode calls, thx
+ GL.glMatrixMode(GL.GL_TEXTURE)
+ GL.glPopMatrix()
GL.glPopAttrib()
|
f3ce6fbb4f90e600955307e307d83103ef090d8f
|
watson-connector/watson_connector.py
|
watson-connector/watson_connector.py
|
#!/usr/bin/env python3
import json
import watson_developer_cloud as wdc
from api_aggregator import ApiAggregator
class WatsonConnector():
"""
Handles the connection to IBM Watson, API calls and whatnot.
"""
def __init__(self, url, username, password, version, db_connector):
self.db = db_connector
self.tone_analyzer = wdc.ToneAnalyzerV3(
url=url,
username=username,
password=password,
version=version
)
def analyze_tone(self, user, date):
"""
Returns the Tone Analyzer Data for a specific user and date.
"""
# TODO: Implement this method
# TODO: How to handle multiple aggregations?
# TODO: Call aggregator and send text to IBM
twitter = ApiAggregator("http://localhost:3000/api/twytta/", "created_at")
aggregation = twitter.get_for_date(date)
# payload = json.dumps(ta.tone(text=aggregation), indent=2)
payload = aggregation
new_id = self.db.put_report(user, date, payload)
return new_id
|
#!/usr/bin/env python3
import json
import watson_developer_cloud as wdc
from api_aggregator import ApiAggregator
class WatsonConnector():
"""
Handles the connection to IBM Watson, API calls and whatnot.
"""
def __init__(self, url, username, password, version, db_connector):
self.db = db_connector
self.tone_analyzer = wdc.ToneAnalyzerV3(
url=url,
username=username,
password=password,
version=version
)
def mock_watson_ta(self, data):
"""
Mocking the IBM Watson Tone Analyzer call for testing.
"""
with open('response.json.example') as data_file:
data = json.load(data_file)
return data
def analyze_tone(self, user, date):
"""
Returns the Tone Analyzer Data for a specific user and date.
"""
# TODO: Implement this method
# TODO: How to handle multiple aggregations?
# TODO: Call aggregator and send text to IBM
twitter = ApiAggregator("http://localhost:3000/api/twytta/", "created_at")
aggregation = twitter.get_for_date(date)
# Real Call
# payload = self.tone_analyzer.tone(text=aggregation)
# Fake Call
payload = self.mock_watson_ta(aggregation)
new_id = self.db.put_report(user, date, payload)
return new_id
|
Add fake IBM Watson response for testing
|
Add fake IBM Watson response for testing
|
Python
|
mit
|
martialblog/watson-diary,martialblog/watson-diary,martialblog/watson-diary
|
---
+++
@@ -22,6 +22,18 @@
version=version
)
+
+ def mock_watson_ta(self, data):
+ """
+ Mocking the IBM Watson Tone Analyzer call for testing.
+ """
+
+ with open('response.json.example') as data_file:
+ data = json.load(data_file)
+
+ return data
+
+
def analyze_tone(self, user, date):
"""
Returns the Tone Analyzer Data for a specific user and date.
@@ -33,8 +45,11 @@
twitter = ApiAggregator("http://localhost:3000/api/twytta/", "created_at")
aggregation = twitter.get_for_date(date)
- # payload = json.dumps(ta.tone(text=aggregation), indent=2)
- payload = aggregation
+ # Real Call
+ # payload = self.tone_analyzer.tone(text=aggregation)
+
+ # Fake Call
+ payload = self.mock_watson_ta(aggregation)
new_id = self.db.put_report(user, date, payload)
|
ec2d3feff6a1677457dfeb5b948b2013bc03df2a
|
classes/admin.py
|
classes/admin.py
|
from django.contrib import admin
from classes.models import Attendee
from classes.models import Attendance
from classes.models import Session
from classes.models import WalkinClass
class AttendanceInline(admin.TabularInline):
model = Attendance
extra = 1
verbose_name = 'Attendee'
verbose_name_plural = 'Attendees'
fields = ('attendee', 'start_date_time', "stop_date_time", 'notes')
# fieldsets = (
# ("Attendee", {'fields': ('name'),}),
# ("Start Date Time", {"fields": ('start_date_time'),}),
# ("Stop Date Time", {"fields": ('stop_date_time'),}),
# ('Notes', {'fields': ('notes'),}),
# )
class AttendeeAdmin(admin.ModelAdmin):
pass
class SessionAdmin(admin.ModelAdmin):
inlines = [
AttendanceInline,
]
fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", )
list_display= ('walk_in_class', 'start_date_time',)
class WalkinClassAdmin(admin.ModelAdmin):
pass
admin.site.register(Attendee, AttendeeAdmin)
admin.site.register(Session, SessionAdmin)
admin.site.register(WalkinClass, WalkinClassAdmin)
|
from django.contrib import admin
from classes.models import Attendee
from classes.models import Attendance
from classes.models import Session
from classes.models import WalkinClass
class AttendanceInline(admin.TabularInline):
model = Attendance
extra = 1
verbose_name = 'Attendee'
verbose_name_plural = 'Attendees'
fields = ('attendee', 'start_date_time', "stop_date_time", 'notes')
class SessionInline(admin.TabularInline):
model = Session
extra = 1
fields = ('start_date_time', 'stop_date_time', 'teacher')
class AttendeeAdmin(admin.ModelAdmin):
pass
class SessionAdmin(admin.ModelAdmin):
inlines = [
AttendanceInline,
]
fields = ('walk_in_class','teacher', 'start_date_time', "stop_date_time", )
list_display= ('walk_in_class', 'start_date_time',)
class WalkinClassAdmin(admin.ModelAdmin):
inlines = [
SessionInline,
]
admin.site.register(Attendee, AttendeeAdmin)
admin.site.register(Session, SessionAdmin)
admin.site.register(WalkinClass, WalkinClassAdmin)
|
Add sessions inline to classes
|
Add sessions inline to classes
|
Python
|
mit
|
thrive-refugee/thrive-refugee,thrive-refugee/thrive-refugee,thrive-refugee/thrive-refugee
|
---
+++
@@ -12,12 +12,12 @@
verbose_name = 'Attendee'
verbose_name_plural = 'Attendees'
fields = ('attendee', 'start_date_time', "stop_date_time", 'notes')
- # fieldsets = (
- # ("Attendee", {'fields': ('name'),}),
- # ("Start Date Time", {"fields": ('start_date_time'),}),
- # ("Stop Date Time", {"fields": ('stop_date_time'),}),
- # ('Notes', {'fields': ('notes'),}),
- # )
+
+
+class SessionInline(admin.TabularInline):
+ model = Session
+ extra = 1
+ fields = ('start_date_time', 'stop_date_time', 'teacher')
class AttendeeAdmin(admin.ModelAdmin):
@@ -32,7 +32,9 @@
list_display= ('walk_in_class', 'start_date_time',)
class WalkinClassAdmin(admin.ModelAdmin):
- pass
+ inlines = [
+ SessionInline,
+ ]
admin.site.register(Attendee, AttendeeAdmin)
admin.site.register(Session, SessionAdmin)
|
75c1d4a1c28efc690e230a6ad1d5244c9a84eb45
|
coil/__init__.py
|
coil/__init__.py
|
# Copyright (c) 2005-2006 Itamar Shtull-Trauring.
# Copyright (c) 2008-2009 ITA Software, Inc.
# See LICENSE.txt for details.
"""Coil: A Configuration Library."""
__version_info__ = (0,3,8)
__version__ = ".".join([str(x) for x in __version_info__])
__all__ = ['struct', 'parser', 'tokenizer', 'errors']
from coil.parser import Parser
def parse_file(file_name, **kwargs):
"""Open and parse a coil file.
See :class:`Parser <coil.parser.Parser>` for possible keyword arguments.
:param file_name: Name of file to parse.
:type file_name: str
:return: The root object.
:rtype: :class:`Struct <coil.struct.Struct>`
"""
coil = open(file_name)
return Parser(coil, file_name, **kwargs).root()
def parse(string, **kwargs):
"""Parse a coil string.
See :class:`Parser <coil.parser.Parser>` for possible keyword arguments.
:param file_name: String containing data to parse.
:type file_name: str
:return: The root object.
:rtype: :class:`Struct <coil.struct.Struct>`
"""
return Parser(string.splitlines(), **kwargs).root()
|
# Copyright (c) 2005-2006 Itamar Shtull-Trauring.
# Copyright (c) 2008-2009 ITA Software, Inc.
# See LICENSE.txt for details.
"""Coil: A Configuration Library."""
__version_info__ = (0,3,9)
__version__ = ".".join([str(x) for x in __version_info__])
__all__ = ['struct', 'parser', 'tokenizer', 'errors']
from coil.parser import Parser
def parse_file(file_name, **kwargs):
"""Open and parse a coil file.
See :class:`Parser <coil.parser.Parser>` for possible keyword arguments.
:param file_name: Name of file to parse.
:type file_name: str
:return: The root object.
:rtype: :class:`Struct <coil.struct.Struct>`
"""
coil = open(file_name)
return Parser(coil, file_name, **kwargs).root()
def parse(string, **kwargs):
"""Parse a coil string.
See :class:`Parser <coil.parser.Parser>` for possible keyword arguments.
:param file_name: String containing data to parse.
:type file_name: str
:return: The root object.
:rtype: :class:`Struct <coil.struct.Struct>`
"""
return Parser(string.splitlines(), **kwargs).root()
|
Bump version to 0.3.9 for development.
|
Bump version to 0.3.9 for development.
Going forward I think I will use the even/odd version scheme to mark
development and stable versions to make minimum version checks easy.
|
Python
|
mit
|
tectronics/coil,tectronics/coil,marineam/coil,kovacsbalu/coil,marineam/coil,kovacsbalu/coil
|
---
+++
@@ -4,7 +4,7 @@
"""Coil: A Configuration Library."""
-__version_info__ = (0,3,8)
+__version_info__ = (0,3,9)
__version__ = ".".join([str(x) for x in __version_info__])
__all__ = ['struct', 'parser', 'tokenizer', 'errors']
|
d8e5545a2397198b0f95854f86a4c4b0e39a42be
|
newsApp/loggingHelper.py
|
newsApp/loggingHelper.py
|
#Initialize the logging.
# InitLogging() should be called at the startup of each process in Procfile
import logging
def InitLogging():
"""
Initizalize the logging.
"""
logging.basicConfig(format='%(module)s:%(levelname)s:%(message)s', level=logging.INFO)
# suppress all logs except critical ones from boto
logging.getLogger('boto').setLevel(logging.CRITICAL)
logging.getLogger('bmemcached').setLevel(logging.ERROR)
logging.captureWarnings(True)
|
#Initialize the logging.
# InitLogging() should be called at the startup of each process in Procfile
import logging
def InitLogging():
"""
Initizalize the logging.
"""
logging.basicConfig(format='%(module)s:%(levelname)s:%(message)s', level=logging.INFO)
# suppress all logs except critical ones from boto
logging.getLogger('boto').setLevel(logging.CRITICAL)
logging.getLogger('bmemcached').setLevel(logging.ERROR)
logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)
logging.captureWarnings(True)
|
Hide ignorable error from googleApiClient
|
Hide ignorable error from googleApiClient
|
Python
|
mit
|
adityabansal/newsAroundMe,adityabansal/newsAroundMe,adityabansal/newsAroundMe
|
---
+++
@@ -15,4 +15,6 @@
logging.getLogger('bmemcached').setLevel(logging.ERROR)
+ logging.getLogger('googleapiclient.discovery_cache').setLevel(logging.ERROR)
+
logging.captureWarnings(True)
|
b3c8ffd334df2c7669eb9f3a037ef6fa33fc521b
|
diylang/interpreter.py
|
diylang/interpreter.py
|
# -*- coding: utf-8 -*-
from os.path import dirname, join
from .evaluator import evaluate
from .parser import parse, unparse, parse_multiple
from .types import Environment
def interpret(source, env=None):
"""
Interpret a DIY Lang program statement
Accepts a program statement as a string, interprets it, and then
returns the resulting DIY Lang expression as string.
"""
if env is None:
env = Environment()
return unparse(evaluate(parse(source), env))
def interpret_file(filename, env=None):
"""
Interpret a DIY Lang file
Accepts the name of a DIY Lang file containing a series of statements.
Returns the value of the last expression of the file.
"""
if env is None:
env = Environment()
with open(filename, 'r') as sourcefile:
source = "".join(sourcefile.readlines())
asts = parse_multiple(source)
results = [evaluate(ast, env) for ast in asts]
return unparse(results[-1])
|
# -*- coding: utf-8 -*-
from .evaluator import evaluate
from .parser import parse, unparse, parse_multiple
from .types import Environment
def interpret(source, env=None):
"""
Interpret a DIY Lang program statement
Accepts a program statement as a string, interprets it, and then
returns the resulting DIY Lang expression as string.
"""
if env is None:
env = Environment()
return unparse(evaluate(parse(source), env))
def interpret_file(filename, env=None):
"""
Interpret a DIY Lang file
Accepts the name of a DIY Lang file containing a series of statements.
Returns the value of the last expression of the file.
"""
if env is None:
env = Environment()
with open(filename, 'r') as sourcefile:
source = "".join(sourcefile.readlines())
asts = parse_multiple(source)
results = [evaluate(ast, env) for ast in asts]
return unparse(results[-1])
|
Remove unused imports in interpeter.
|
Remove unused imports in interpeter.
|
Python
|
bsd-3-clause
|
codecop/diy-lang,codecop/diy-lang
|
---
+++
@@ -1,6 +1,4 @@
# -*- coding: utf-8 -*-
-
-from os.path import dirname, join
from .evaluator import evaluate
from .parser import parse, unparse, parse_multiple
|
36b5bbba101b421234b4b776f4f4c2ac47516027
|
indra/db/belief.py
|
indra/db/belief.py
|
class MockStatement(object):
"""A class to imitate real INDRA Statements for calculating belief."""
def __init__(self, mk_hash, evidence, supports=None, supported_by=None):
if isinstance(evidence, list):
self.evidence = evidence
else:
self.evidence = [evidence]
self.__mk_hash = mk_hash
if supports:
self.supports = supports
else:
self.supports = []
if supported_by:
self.supported_by = supported_by
else:
self.supported_by = []
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
"""A class to imitate real INDRA Evidence for calculating belief."""
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
|
class MockStatement(object):
"""A class to imitate real INDRA Statements for calculating belief."""
def __init__(self, mk_hash, evidence, supports=None, supported_by=None):
if isinstance(evidence, list):
self.evidence = evidence
else:
self.evidence = [evidence]
self.__mk_hash = mk_hash
if supports:
self.supports = supports
else:
self.supports = []
if supported_by:
self.supported_by = supported_by
else:
self.supported_by = []
self.belief = None
def matches_key(self):
return self.__mk_hash
class MockEvidence(object):
"""A class to imitate real INDRA Evidence for calculating belief."""
def __init__(self, source_api):
self.source_api = source_api
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
def populate_support(stmts, links):
"""Populate the supports supported_by lists of statements given links.
Parameters
----------
stmts : list[MockStatement/Statement]
A list of objects with supports and supported_by attributes which are
lists or equivalent.
links : list[tuple]
A list of pairs of hashes or matches_keys, where the first supports the
second.
"""
stmt_dict = {s.matches_key(): s for s in stmts}
for supped_idx, supping_idx in links:
stmt_dict[supping_idx].supports.append(stmt_dict[supped_idx])
stmt_dict[supped_idx].supported_by.append(stmt_dict[supping_idx])
return
|
Add function to populate support from links.
|
Add function to populate support from links.
|
Python
|
bsd-2-clause
|
johnbachman/indra,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,pvtodorov/indra,bgyori/indra,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/indra,bgyori/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,johnbachman/belpy,johnbachman/belpy,johnbachman/indra,pvtodorov/indra
|
---
+++
@@ -28,3 +28,22 @@
# Some annotations are used in indra.belief.tag_evidence_subtype.
# TODO: optionally implement necessary annotations.
self.annotations = {}
+
+
+def populate_support(stmts, links):
+ """Populate the supports supported_by lists of statements given links.
+
+ Parameters
+ ----------
+ stmts : list[MockStatement/Statement]
+ A list of objects with supports and supported_by attributes which are
+ lists or equivalent.
+ links : list[tuple]
+ A list of pairs of hashes or matches_keys, where the first supports the
+ second.
+ """
+ stmt_dict = {s.matches_key(): s for s in stmts}
+ for supped_idx, supping_idx in links:
+ stmt_dict[supping_idx].supports.append(stmt_dict[supped_idx])
+ stmt_dict[supped_idx].supported_by.append(stmt_dict[supping_idx])
+ return
|
2d7c481c7c3d01a22759802a701e4c14789935d3
|
django/__init__.py
|
django/__init__.py
|
VERSION = (1, 0, 'post-release-SVN')
def get_version():
"Returns the version as a human-format string."
v = '.'.join([str(i) for i in VERSION[:-1]])
if VERSION[-1]:
from django.utils.version import get_svn_revision
v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
return v
|
VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
version = '%s.%s' % (VERSION[0], VERSION[1])
if VERSION[2]:
version = '%s.%s' % (version, VERSION[2])
if VERSION[3:] == ('alpha', 0):
version = '%s pre-alpha' % version
else:
version = '%s %s' % (version, VERSION[3])
if VERSION[3] != 'final':
version = '%s %s' % (version, VERSION[4])
from django.utils.version import get_svn_revision
svn_rev = get_svn_revision()
if svn_rev != u'SVN-unknown':
version = "%s %s" % (version, svn_rev)
return version
|
Update django.VERSION in trunk per previous discussion
|
Update django.VERSION in trunk per previous discussion
git-svn-id: 4f9f921b081c523744c7bf24d959a0db39629563@9103 bcc190cf-cafb-0310-a4f2-bffc1f526a37
|
Python
|
bsd-3-clause
|
FlaPer87/django-nonrel,aparo/django-nonrel,FlaPer87/django-nonrel,aparo/django-nonrel,aparo/django-nonrel,FlaPer87/django-nonrel
|
---
+++
@@ -1,9 +1,17 @@
-VERSION = (1, 0, 'post-release-SVN')
+VERSION = (1, 1, 0, 'alpha', 0)
def get_version():
- "Returns the version as a human-format string."
- v = '.'.join([str(i) for i in VERSION[:-1]])
- if VERSION[-1]:
- from django.utils.version import get_svn_revision
- v = '%s-%s-%s' % (v, VERSION[-1], get_svn_revision())
- return v
+ version = '%s.%s' % (VERSION[0], VERSION[1])
+ if VERSION[2]:
+ version = '%s.%s' % (version, VERSION[2])
+ if VERSION[3:] == ('alpha', 0):
+ version = '%s pre-alpha' % version
+ else:
+ version = '%s %s' % (version, VERSION[3])
+ if VERSION[3] != 'final':
+ version = '%s %s' % (version, VERSION[4])
+ from django.utils.version import get_svn_revision
+ svn_rev = get_svn_revision()
+ if svn_rev != u'SVN-unknown':
+ version = "%s %s" % (version, svn_rev)
+ return version
|
ab7c07078fb3fa2a5cbeda1ca04ddb91a7fb32a0
|
oauth_provider/consts.py
|
oauth_provider/consts.py
|
from django.utils.translation import ugettext_lazy as _
KEY_SIZE = 16
SECRET_SIZE = 16
VERIFIER_SIZE = 10
CONSUMER_KEY_SIZE = 256
MAX_URL_LENGTH = 2083 # http://www.boutell.com/newfaq/misc/urllength.html
PENDING = 1
ACCEPTED = 2
CANCELED = 3
REJECTED = 4
CONSUMER_STATES = (
(PENDING, _('Pending')),
(ACCEPTED, _('Accepted')),
(CANCELED, _('Canceled')),
(REJECTED, _('Rejected')),
)
PARAMETERS_NAMES = ('consumer_key', 'token', 'signature',
'signature_method', 'timestamp', 'nonce')
OAUTH_PARAMETERS_NAMES = ['oauth_'+s for s in PARAMETERS_NAMES]
OUT_OF_BAND = 'oob'
|
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
KEY_SIZE = getattr(settings, 'OAUTH_PROVIDER_KEY_SIZE', 16)
SECRET_SIZE = getattr(settings, 'OAUTH_PROVIDER_SECRET_SIZE', 16)
VERIFIER_SIZE = getattr(settings, 'OAUTH_PROVIDER_VERIFIER_SIZE', 10)
CONSUMER_KEY_SIZE = getattr(settings, 'OAUTH_PROVIDER_CONSUMER_KEY_SIZE', 256)
MAX_URL_LENGTH = 2083 # http://www.boutell.com/newfaq/misc/urllength.html
PENDING = 1
ACCEPTED = 2
CANCELED = 3
REJECTED = 4
CONSUMER_STATES = (
(PENDING, _('Pending')),
(ACCEPTED, _('Accepted')),
(CANCELED, _('Canceled')),
(REJECTED, _('Rejected')),
)
PARAMETERS_NAMES = ('consumer_key', 'token', 'signature',
'signature_method', 'timestamp', 'nonce')
OAUTH_PARAMETERS_NAMES = ['oauth_'+s for s in PARAMETERS_NAMES]
OUT_OF_BAND = 'oob'
|
Allow settings to override default lengths.
|
Allow settings to override default lengths.
|
Python
|
bsd-3-clause
|
lukegb/django-oauth-plus,amrox/django-oauth-plus
|
---
+++
@@ -1,9 +1,10 @@
from django.utils.translation import ugettext_lazy as _
+from django.conf import settings
-KEY_SIZE = 16
-SECRET_SIZE = 16
-VERIFIER_SIZE = 10
-CONSUMER_KEY_SIZE = 256
+KEY_SIZE = getattr(settings, 'OAUTH_PROVIDER_KEY_SIZE', 16)
+SECRET_SIZE = getattr(settings, 'OAUTH_PROVIDER_SECRET_SIZE', 16)
+VERIFIER_SIZE = getattr(settings, 'OAUTH_PROVIDER_VERIFIER_SIZE', 10)
+CONSUMER_KEY_SIZE = getattr(settings, 'OAUTH_PROVIDER_CONSUMER_KEY_SIZE', 256)
MAX_URL_LENGTH = 2083 # http://www.boutell.com/newfaq/misc/urllength.html
PENDING = 1
|
4b018935c4729aff0dfcff709331f840dd05e8b6
|
kimochiconsumer/__init__.py
|
kimochiconsumer/__init__.py
|
from pyramid.config import Configurator
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
config = Configurator(settings=settings)
config.include('pyramid_mako')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('home', '/')
config.scan()
return config.make_wsgi_app()
|
from pyramid.config import Configurator
import kimochiconsumer.kimochi
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
def get_kimochi(request):
return kimochi.Kimochi(settings['kimochi.url'],
settings['kimochi.api_key'],
site_key=settings['kimochi.site_key'])
config = Configurator(settings=settings)
config.include('pyramid_mako')
config.add_static_view('static', 'static', cache_max_age=3600)
config.add_route('page', '/')
config.add_request_method(get_kimochi, 'kimochi', reify=True)
config.scan()
return config.make_wsgi_app()
|
Add kimochi client to request to make it available to views
|
Add kimochi client to request to make it available to views
|
Python
|
mit
|
matslindh/kimochi-consumer
|
---
+++
@@ -1,12 +1,19 @@
from pyramid.config import Configurator
+import kimochiconsumer.kimochi
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
+ def get_kimochi(request):
+ return kimochi.Kimochi(settings['kimochi.url'],
+ settings['kimochi.api_key'],
+ site_key=settings['kimochi.site_key'])
+
config = Configurator(settings=settings)
config.include('pyramid_mako')
config.add_static_view('static', 'static', cache_max_age=3600)
- config.add_route('home', '/')
+ config.add_route('page', '/')
+ config.add_request_method(get_kimochi, 'kimochi', reify=True)
config.scan()
return config.make_wsgi_app()
|
ebc5d1024c45a12595507fa1caa0bfc6353a9a32
|
c2cgeoportal/views/echo.py
|
c2cgeoportal/views/echo.py
|
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
@view_config(route_name='echo')
def echo(request):
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
response.app_iter = file.file
response.content_type = 'application/octet-stream'
return response
|
from base64 import b64encode
import os.path
import re
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.response import Response
from pyramid.view import view_config
def base64_encode_chunks(file, chunk_size=57):
"""
Generate base64 encoded lines of up to 76 (== 57 * 8 / 6) characters, according to RFC2045.
See http://en.wikipedia.org/wiki/Base64
"""
while True:
line = file.read(chunk_size)
if not line:
break
yield b64encode(line) + '\n'
@view_config(route_name='echo')
def echo(request):
"""
Echo an uploaded file back to the client as an text/html document so it can be handled by Ext.
The response is base64 encoded to ensure that there are no special HTML characters or charset problems.
See http://docs.sencha.com/ext-js/3-4/#!/api/Ext.form.BasicForm-cfg-fileUpload
"""
if request.method != 'POST':
raise HTTPBadRequest()
try:
file = request.POST['file']
except KeyError:
raise HTTPBadRequest()
response = Response()
response.app_iter = base64_encode_chunks(file.file)
response.content_type = 'text/html'
return response
|
Return a base64 text/html response instead of a binary response
|
Return a base64 text/html response instead of a binary response
|
Python
|
bsd-2-clause
|
tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal,tsauerwein/c2cgeoportal
|
---
+++
@@ -1,3 +1,4 @@
+from base64 import b64encode
import os.path
import re
@@ -6,8 +7,25 @@
from pyramid.view import view_config
+def base64_encode_chunks(file, chunk_size=57):
+ """
+ Generate base64 encoded lines of up to 76 (== 57 * 8 / 6) characters, according to RFC2045.
+ See http://en.wikipedia.org/wiki/Base64
+ """
+ while True:
+ line = file.read(chunk_size)
+ if not line:
+ break
+ yield b64encode(line) + '\n'
+
+
@view_config(route_name='echo')
def echo(request):
+ """
+ Echo an uploaded file back to the client as an text/html document so it can be handled by Ext.
+ The response is base64 encoded to ensure that there are no special HTML characters or charset problems.
+ See http://docs.sencha.com/ext-js/3-4/#!/api/Ext.form.BasicForm-cfg-fileUpload
+ """
if request.method != 'POST':
raise HTTPBadRequest()
try:
@@ -15,6 +33,6 @@
except KeyError:
raise HTTPBadRequest()
response = Response()
- response.app_iter = file.file
- response.content_type = 'application/octet-stream'
+ response.app_iter = base64_encode_chunks(file.file)
+ response.content_type = 'text/html'
return response
|
089af405b331ecfa2cb0cf9a74423c392beea4e4
|
lazysignup/test_settings.py
|
lazysignup/test_settings.py
|
# Settings to be used when running unit tests
# python manage.py test --settings=lazysignup.test_settings lazysignup
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = '' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
INSTALLED_APPS = (
# Put any other apps that your app depends on here
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.contenttypes',
'lazysignup',
)
SITE_ID = 1
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"lazysignup.backends.LazySignupBackend",
)
MIDDLEWARE_CLASSES = [
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"lazysignup.middleware.LazySignupMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
]
LAZYSIGNUP_USER_AGENT_BLACKLIST = [
"^search",
]
# This merely needs to be present - as long as your test case specifies a
# urls attribute, it does not need to be populated.
ROOT_URLCONF = ''
|
# Settings to be used when running unit tests
# python manage.py test --settings=lazysignup.test_settings lazysignup
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = '' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
INSTALLED_APPS = (
# Put any other apps that your app depends on here
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.contenttypes',
'lazysignup',
)
SITE_ID = 1
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"lazysignup.backends.LazySignupBackend",
)
MIDDLEWARE_CLASSES = [
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
]
LAZYSIGNUP_USER_AGENT_BLACKLIST = [
"^search",
]
# This merely needs to be present - as long as your test case specifies a
# urls attribute, it does not need to be populated.
ROOT_URLCONF = ''
|
Remove the now-defunct middleware from the test settings
|
Remove the now-defunct middleware from the test settings
|
Python
|
bsd-3-clause
|
rwillmer/django-lazysignup,stefanklug/django-lazysignup,rwillmer/django-lazysignup,danfairs/django-lazysignup,danfairs/django-lazysignup,stefanklug/django-lazysignup
|
---
+++
@@ -25,7 +25,6 @@
MIDDLEWARE_CLASSES = [
"django.middleware.common.CommonMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
- "lazysignup.middleware.LazySignupMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
]
|
fa9fc567e9dcbfa5f301e3c998f87699f9ee11d5
|
magnumclient/magnum.py
|
magnumclient/magnum.py
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
CLI commands not yet implemented
* bay list
* bay create
* bay delete
* bay show
* pod create
* pod list
* pod delete
* pod show
* container create
* container list
* container delete
* container show
* container reboot
* container stop
* container start
* container pause
* container unpause
* container logs
* container execute
"""
def main():
    """Print a placeholder message; the real CLI is not implemented yet."""
    # The original used the Python 2 print statement, which is a SyntaxError
    # on Python 3. The single-argument parenthesized form prints identical
    # output on both Python 2 and Python 3.
    print('This is the client to be')
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
CLI commands not yet implemented
* bay list
* bay create
* bay delete
* bay show
* pod create
* pod list
* pod delete
* pod show
* service create
* service list
* service delete
* service show
* container create
* container list
* container delete
* container show
* container reboot
* container stop
* container start
* container pause
* container unpause
* container logs
* container execute
"""
def main():
    """Print a placeholder message; the real CLI is not implemented yet."""
    # The original used the Python 2 print statement, which is a SyntaxError
    # on Python 3. The single-argument parenthesized form prints identical
    # output on both Python 2 and Python 3.
    print('This is the client to be')
|
Add service API methods to what should be implemented
|
Add service API methods to what should be implemented
Add service API methods
|
Python
|
apache-2.0
|
openstack/python-magnumclient,ramielrowe/python-magnumclient,ramielrowe/python-magnumclient
|
---
+++
@@ -24,6 +24,11 @@
* pod delete
* pod show
+* service create
+* service list
+* service delete
+* service show
+
* container create
* container list
* container delete
|
eace27e618187860564de9501aabcb360112025a
|
matchmaker/skill.py
|
matchmaker/skill.py
|
import datetime
from sqlalchemy.sql import func
from models import MatchResult, BotSkill, BotRank
# Nightly job: recompute each bot's skill (chips won per hand) and store it
# as today's BotSkill row, carrying yesterday's skill alongside.
class SkillUpdater(object):
    def run(self, db):
        """Recompute and persist today's skill for every bot with results.

        NOTE(review): only bots present in MatchResult appear in the query,
        and `s[1] / s[2]` raises ZeroDivisionError when a bot's summed hand
        count is 0.
        """
        session = db.session
        today = datetime.date.today()
        # Per bot: total chip delta and total hands played.
        skills = session.query(
            MatchResult.bot,
            func.sum(MatchResult.delta_chips),
            func.sum(MatchResult.hands)
        ) \
            .group_by(MatchResult.bot) \
            .all()
        yesterday = today - datetime.timedelta(days=1)
        # bot -> yesterday's skill, so each new row records the previous value.
        yesterdays_skills = {b.bot: b.skill for b in
                             BotSkill.query.filter_by(date=yesterday).all()}
        # Re-running on the same day replaces today's rows instead of duplicating.
        BotSkill.query.filter_by(date=today).delete()
        session.bulk_save_objects(
            [BotSkill(s[0], today, s[1] / s[2], yesterdays_skills.get(s[0], 0))
             for s in skills]
        )
        session.commit()
class RankUpdater(object):
    # Rebuild the leaderboard from today's skills.
    def run(self, db):
        """Replace all BotRank rows, ranking bots 1..N by skill, descending."""
        BotRank.query.delete()
        today = datetime.date.today()
        skills = BotSkill.query.filter_by(date=today) \
            .order_by(BotSkill.skill.desc()) \
            .all()
        # enumerate from 1 so the most skilled bot gets rank 1.
        for i, skill in enumerate(skills, 1):
            rank = BotRank(skill.bot, i)
            db.session.add(rank)
        db.session.commit()
|
import datetime
from sqlalchemy.sql import func
from models import MatchResult, BotSkill, BotRank, BotIdentity
class SkillUpdater(object):
    """Nightly recomputation of per-bot skill (mean chips won per hand)."""

    def run(self, db):
        """Recompute today's skill for every registered bot and persist it.

        The outer join from BotIdentity includes bots with no recorded
        matches; their totals coalesce to 0 and they get a skill of 0.
        """
        sess = db.session
        today = datetime.date.today()
        prior_day = today - datetime.timedelta(days=1)

        # One row per bot: (bot id, total chip delta, total hands played).
        totals = sess.query(
            BotIdentity.id,
            func.coalesce(func.sum(MatchResult.delta_chips), 0),
            func.coalesce(func.sum(MatchResult.hands), 0),
        ).outerjoin(MatchResult).group_by(BotIdentity.id).all()

        # bot id -> skill recorded yesterday (bots absent yesterday get 0).
        previous = {}
        for entry in BotSkill.query.filter_by(date=prior_day).all():
            previous[entry.bot] = entry.skill

        # Replace any rows already written today, then bulk-insert fresh ones.
        BotSkill.query.filter_by(date=today).delete()
        fresh = []
        for bot_id, chips, hands in totals:
            rate = self.calc_winnings_per_hand(chips, hands)
            fresh.append(BotSkill(bot_id, today, rate, previous.get(bot_id, 0)))
        sess.bulk_save_objects(fresh)
        sess.commit()

    def calc_winnings_per_hand(self, chips, hand):
        """Return chips / hand, or 0 when no hands were played."""
        if not hand:
            return 0
        return chips / hand
class RankUpdater(object):
    # Rebuild the leaderboard from today's skills.
    def run(self, db):
        """Replace all BotRank rows, ranking bots 1..N by skill, descending."""
        BotRank.query.delete()
        today = datetime.date.today()
        skills = BotSkill.query.filter_by(date=today) \
            .order_by(BotSkill.skill.desc()) \
            .all()
        # enumerate from 1 so the most skilled bot gets rank 1.
        for i, skill in enumerate(skills, 1):
            rank = BotRank(skill.bot, i)
            db.session.add(rank)
        db.session.commit()
|
Add bots with no games to the leaderboard
|
Add bots with no games to the leaderboard
|
Python
|
mit
|
gnmerritt/casino,gnmerritt/casino,gnmerritt/casino,gnmerritt/casino
|
---
+++
@@ -1,6 +1,6 @@
import datetime
from sqlalchemy.sql import func
-from models import MatchResult, BotSkill, BotRank
+from models import MatchResult, BotSkill, BotRank, BotIdentity
class SkillUpdater(object):
@@ -8,21 +8,30 @@
session = db.session
today = datetime.date.today()
skills = session.query(
- MatchResult.bot,
- func.sum(MatchResult.delta_chips),
- func.sum(MatchResult.hands)
+ BotIdentity.id,
+ func.coalesce(func.sum(MatchResult.delta_chips), 0),
+ func.coalesce(func.sum(MatchResult.hands), 0),
) \
- .group_by(MatchResult.bot) \
+ .outerjoin(MatchResult) \
+ .group_by(BotIdentity.id) \
.all()
yesterday = today - datetime.timedelta(days=1)
yesterdays_skills = {b.bot: b.skill for b in
BotSkill.query.filter_by(date=yesterday).all()}
BotSkill.query.filter_by(date=today).delete()
session.bulk_save_objects(
- [BotSkill(s[0], today, s[1] / s[2], yesterdays_skills.get(s[0], 0))
+ [BotSkill(s[0], today,
+ self.calc_winnings_per_hand(s[1], s[2]),
+ yesterdays_skills.get(s[0], 0))
for s in skills]
)
session.commit()
+
+ def calc_winnings_per_hand(self, chips, hand):
+ try:
+ return chips / hand
+ except ZeroDivisionError:
+ return 0
class RankUpdater(object):
|
327b3c12924c14864d0101bb80104db1e975503e
|
python/pyhit/__init__.py
|
python/pyhit/__init__.py
|
# pyhit package init: locate (building on demand) the compiled `hit` parser
# extension, then re-export the public pyhit API.
import os
import sys
import subprocess
import mooseutils

# Repository root and submodule state decide which copy of `hit` is used.
moose_dir = mooseutils.git_root_dir(os.path.dirname(__file__))
status = mooseutils.git_submodule_status(moose_dir)

# Use framework/contrib/hit because moosetools submodule is not available
if status['moosetools'] == '-':
    try:
        from . import hit
    except:
        # Extension not built yet: `make hit` in moose/test builds it, retry.
        moose_test_dir = os.path.abspath(os.path.join(moose_dir, 'test'))
        subprocess.run(['make', 'hit'], cwd=moose_test_dir)
        from . import hit
# Use hit in moosetools submodule
else:
    hit_dir = os.path.join(moose_dir, 'moosetools', 'contrib', 'hit')
    try:
        sys.path.append(hit_dir)
        import hit
    except:
        # Build the shared object in place, then retry the import.
        subprocess.run(['make', 'hit.so'], cwd=hit_dir)
        import hit

# Re-export the public API.
from hit import TokenType, Token
from .pyhit import Node, load, write, parse, tokenize
|
# pyhit package init: put the known location of the compiled `hit` parser on
# sys.path (building it on demand), then re-export the public pyhit API.
import os
import sys
import subprocess
import mooseutils

# Repository root and submodule state decide which copy of `hit` is used.
moose_dir = mooseutils.git_root_dir(os.path.dirname(__file__))
status = mooseutils.git_submodule_status(moose_dir)

# Use framework/contrib/hit because moosetools submodule is not available
if status['moosetools'] == '-':
    hit_dir = os.path.join(moose_dir, 'framework', 'contrib', 'hit')
    sys.path.append(hit_dir)
    try:
        import hit
    except:
        # Extension not built yet: `make hit` in moose/test builds it, retry.
        moose_test_dir = os.path.abspath(os.path.join(moose_dir, 'test'))
        subprocess.run(['make', 'hit'], cwd=moose_test_dir)
        import hit
# Use hit in moosetools submodule
else:
    hit_dir = os.path.join(moose_dir, 'moosetools', 'contrib', 'hit')
    sys.path.append(hit_dir)
    try:
        import hit
    except:
        # Build the shared object in place, then retry the import.
        subprocess.run(['make', 'hit.so'], cwd=hit_dir)
        import hit

# Re-export the public API.
from hit import TokenType, Token
from .pyhit import Node, load, write, parse, tokenize
|
Add known location for HIT when loading pyhit
|
Add known location for HIT when loading pyhit
(refs #17108)
|
Python
|
lgpl-2.1
|
idaholab/moose,laagesen/moose,SudiptaBiswas/moose,SudiptaBiswas/moose,idaholab/moose,jessecarterMOOSE/moose,andrsd/moose,laagesen/moose,dschwen/moose,milljm/moose,jessecarterMOOSE/moose,sapitts/moose,andrsd/moose,sapitts/moose,SudiptaBiswas/moose,bwspenc/moose,nuclear-wizard/moose,laagesen/moose,nuclear-wizard/moose,dschwen/moose,lindsayad/moose,milljm/moose,jessecarterMOOSE/moose,nuclear-wizard/moose,milljm/moose,harterj/moose,andrsd/moose,lindsayad/moose,dschwen/moose,sapitts/moose,SudiptaBiswas/moose,dschwen/moose,nuclear-wizard/moose,harterj/moose,idaholab/moose,bwspenc/moose,milljm/moose,lindsayad/moose,jessecarterMOOSE/moose,harterj/moose,milljm/moose,idaholab/moose,dschwen/moose,sapitts/moose,harterj/moose,laagesen/moose,lindsayad/moose,harterj/moose,idaholab/moose,bwspenc/moose,laagesen/moose,lindsayad/moose,sapitts/moose,SudiptaBiswas/moose,jessecarterMOOSE/moose,andrsd/moose,andrsd/moose,bwspenc/moose,bwspenc/moose
|
---
+++
@@ -8,17 +8,19 @@
# Use framework/contrib/hit because moosetools submodule is not available
if status['moosetools'] == '-':
+ hit_dir = os.path.join(moose_dir, 'framework', 'contrib', 'hit')
+ sys.path.append(hit_dir)
try:
- from . import hit
+ import hit
except:
moose_test_dir = os.path.abspath(os.path.join(moose_dir, 'test'))
subprocess.run(['make', 'hit'], cwd=moose_test_dir)
- from . import hit
+ import hit
# Use hit in moosetools submodule
else:
hit_dir = os.path.join(moose_dir, 'moosetools', 'contrib', 'hit')
+ sys.path.append(hit_dir)
try:
- sys.path.append(hit_dir)
import hit
except:
subprocess.run(['make', 'hit.so'], cwd=hit_dir)
|
be27ec6d2567b85b94b40c79570ca5d9c20fd0bf
|
modeltrans/admin.py
|
modeltrans/admin.py
|
from .conf import get_default_language
from .translator import get_i18n_field
from .utils import get_language
class ActiveLanguageMixin(object):
    '''
    Hide all translated fields, except:

    - The field for the default language (settings.LANGUAGE_CODE)
    - The field for the currently active language.
    '''
    def get_exclude(self, request, obj=None):
        # Models without an i18n field keep the default admin behaviour.
        # NOTE(review): `obj` is not forwarded to super(); confirm the base
        # get_exclude() tolerates the missing argument.
        i18n_field = get_i18n_field(self.model)
        if i18n_field is None:
            return super(ActiveLanguageMixin, self).get_exclude(request)
        language = get_language()
        # False never equals a field's language, so when the active language
        # is the default only the default-language fields survive below.
        if language == get_default_language():
            language = False
        excludes = []
        for field in i18n_field.get_translated_fields():
            # Keep the default-language field (language is None) and the
            # field for the active language; exclude all the others.
            if field.language is None or field.language == language:
                continue
            excludes.append(field.name)
        return excludes
|
from .conf import get_default_language
from .translator import get_i18n_field
from .utils import get_language
class ActiveLanguageMixin(object):
    '''
    Add this mixin to your admin class to hide the untranslated field and all
    translated fields, except:

    - The field for the default language (settings.LANGUAGE_CODE)
    - The field for the currently active language.
    '''
    def get_exclude(self, request, obj=None):
        # use default implementation for models without i18n-field
        # NOTE(review): `obj` is not forwarded to super(); confirm the base
        # get_exclude() tolerates the missing argument.
        i18n_field = get_i18n_field(self.model)
        if i18n_field is None:
            return super(ActiveLanguageMixin, self).get_exclude(request)
        language = get_language()
        # False never equals a field's language, so when the active language
        # is the default only the default-language fields survive below.
        if language == get_default_language():
            language = False
        excludes = []
        for field in i18n_field.get_translated_fields():
            # Keep the default-language field (language is None) and the
            # field for the active language; exclude all the others.
            if field.language is None or field.language == language:
                continue
            excludes.append(field.name)
            # also add the name of the original field, as it is added
            excludes.append(field.original_field.name)

        # de-duplicate
        return list(set(excludes))
|
Improve ActiveLanguageMixin to hide original field
|
Improve ActiveLanguageMixin to hide original field
|
Python
|
bsd-3-clause
|
zostera/django-modeltrans,zostera/django-modeltrans
|
---
+++
@@ -5,12 +5,14 @@
class ActiveLanguageMixin(object):
'''
- Hide all translated fields, except:
+ Add this mixin to your admin class to hide the untranslated field and all
+ translated fields, except:
+
- The field for the default language (settings.LANGUAGE_CODE)
- The field for the currently active language.
'''
def get_exclude(self, request, obj=None):
-
+ # use default implementation for models without i18n-field
i18n_field = get_i18n_field(self.model)
if i18n_field is None:
return super(ActiveLanguageMixin, self).get_exclude(request)
@@ -25,4 +27,8 @@
continue
excludes.append(field.name)
- return excludes
+ # also add the name of the original field, as it is added
+ excludes.append(field.original_field.name)
+
+ # de-duplicate
+ return list(set(excludes))
|
c32de922e3e7419d58a8cd7c1a00cf53833c49c7
|
mpltools/io/core.py
|
mpltools/io/core.py
|
import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
    """Save all open figures.

    Each figure is saved with the title of the plot, if possible.

    Parameters
    ------------
    directory : str
        Path where figures are saved.
    fmt : str, list of str
        Image format(s) of saved figures. If None, defaults to 'png'.
    default_name : str
        Default filename to use if plot has no title. Must contain '%i' for the
        figure number.

    Examples
    --------
    >>> save_all_figs('plots/', fmt=['pdf','png'])

    """
    fmt = fmt if fmt is not None else 'png'

    # Normalise a single format string to a one-element list.
    # NOTE(review): `basestring` is Python 2 only; this raises NameError on
    # Python 3 — confirm the project still targets Python 2.
    if isinstance(fmt, basestring):
        fmt = [fmt]

    for fignum in plt.get_fignums():
        try:
            # Use the first axes' title as the file name.
            filename = plt.figure(fignum).get_axes()[0].get_title()
        except IndexError:
            # Figure has no axes: skip it.
            continue
        if filename == '':
            filename = default_name % fignum

        savepath = os.path.join(directory, filename)

        # Save once per requested format.
        for a_fmt in fmt:
            savename = '%s.%s' % (savepath, a_fmt)
            plt.savefig(savename)
            print("Saved '%s'" % savename)
|
import os
import matplotlib.pyplot as plt
def save_all_figs(directory='./', fmt='png', default_name='untitled%i'):
    """Save all open figures.

    Each figure is saved with the title of the plot, if possible, and multiple
    file formats can be saved by specifying a list of extensions.

    Parameters
    ------------
    directory : str
        Path where figures are saved.
    fmt : str, list of str
        Image format(s) of saved figures.
    default_name : str
        Default filename to use if plot has no title. Must contain '%i' for the
        figure number.

    Examples
    --------
    >>> save_all_figs('plots/', fmt=['pdf','png'])

    """
    # Normalise a single format string to a one-element list.
    # NOTE(review): `basestring` is Python 2 only; this raises NameError on
    # Python 3 — confirm the project still targets Python 2.
    if isinstance(fmt, basestring):
        fmt = [fmt]

    for fignum in plt.get_fignums():
        try:
            # Use the first axes' title as the file name.
            filename = plt.figure(fignum).get_axes()[0].get_title()
        except IndexError:
            # Figure has no axes: skip it.
            continue
        if filename == '':
            filename = default_name % fignum

        savepath = os.path.join(directory, filename)

        # Save once per requested format.
        for a_fmt in fmt:
            savename = '%s.%s' % (savepath, a_fmt)
            plt.savefig(savename)
            print("Saved '%s'" % savename)
|
Simplify handling of image format.
|
Simplify handling of image format.
|
Python
|
bsd-3-clause
|
matteoicardi/mpltools,tonysyu/mpltools
|
---
+++
@@ -2,18 +2,18 @@
import matplotlib.pyplot as plt
-def save_all_figs(directory='./', fmt=None, default_name='untitled%i'):
+def save_all_figs(directory='./', fmt='png', default_name='untitled%i'):
"""Save all open figures.
- Each figure is saved with the title of the plot, if possible.
+ Each figure is saved with the title of the plot, if possible, and multiple
+ file formats can be saved by specifying a list of extensions.
Parameters
------------
directory : str
Path where figures are saved.
fmt : str, list of str
- Image format(s) of saved figures. If None, default to rc parameter
- 'savefig.extension'.
+ Image format(s) of saved figures.
default_name : str
Default filename to use if plot has no title. Must contain '%i' for the
figure number.
@@ -23,7 +23,6 @@
>>> save_all_figs('plots/', fmt=['pdf','png'])
"""
- fmt = fmt if fmt is not None else 'png'
if isinstance(fmt, basestring):
fmt = [fmt]
|
6d4573445692b86c4b406a4b1d93ce19199c328a
|
brp/accounts/migrations/0001_initial.py
|
brp/accounts/migrations/0001_initial.py
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration, DataMigration
from django.db import models
class Migration(DataMigration):
    # South migration: create the accounts_userprofile table and load the
    # initial EULA fixture.
    def forwards(self, orm):
        db.create_table(u'accounts_userprofile', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user_id', self.gf('django.db.models.fields.IntegerField')(null=False)),
            ('institution', self.gf('django.db.models.fields.CharField')(max_length=100, null=True)),
            ('eula', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('reason', self.gf('django.db.models.fields.CharField')(max_length=255, null=True)),
        ))
        db.send_create_signal(u'accounts', ['UserProfile'])

        # NOTE(review): the line below is a bare string statement (a no-op),
        # not a docstring — presumably intended as a comment.
        "Perform a 'safe' load using Avocado's backup utilities."
        from django.core.management import call_command
        call_command('loaddata', 'brp/apps/accounts/fixtures/0001_eula.json')

    def backwards(self, orm):
        # Reverse: drop the table created in forwards().
        db.delete_table(u'accounts_userprofile')
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration, DataMigration
from django.db import models
class Migration(DataMigration):
    # South migration: create the accounts_userprofile table and load the
    # initial EULA fixture.
    def forwards(self, orm):
        db.create_table(u'accounts_userprofile', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('user_id', self.gf('django.db.models.fields.IntegerField')(null=False)),
            ('institution', self.gf('django.db.models.fields.CharField')(max_length=100, null=True)),
            ('eula', self.gf('django.db.models.fields.BooleanField')(default=False)),
            ('reason', self.gf('django.db.models.fields.CharField')(max_length=255, null=True)),
        ))
        db.send_create_signal(u'accounts', ['UserProfile'])

        # NOTE(review): the line below is a bare string statement (a no-op),
        # not a docstring — presumably intended as a comment.
        "Perform a 'safe' load using Avocado's backup utilities."
        from django.core.management import call_command
        call_command('loaddata', 'brp/accounts/fixtures/0001_eula.json')

    def backwards(self, orm):
        # Reverse: drop the table created in forwards().
        db.delete_table(u'accounts_userprofile')
|
Adjust migration for new structure
|
Adjust migration for new structure
|
Python
|
bsd-2-clause
|
chop-dbhi/biorepo-portal,chop-dbhi/biorepo-portal,chop-dbhi/biorepo-portal,chop-dbhi/biorepo-portal
|
---
+++
@@ -20,7 +20,7 @@
"Perform a 'safe' load using Avocado's backup utilities."
from django.core.management import call_command
- call_command('loaddata', 'brp/apps/accounts/fixtures/0001_eula.json')
+ call_command('loaddata', 'brp/accounts/fixtures/0001_eula.json')
def backwards(self, orm):
db.delete_table(u'accounts_userprofile')
|
8c097f07eca52dc37e8d3d4591bb9ee1c05fa310
|
calexicon/calendars/tests/test_other.py
|
calexicon/calendars/tests/test_other.py
|
from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
    """Tests for conversions into the JulianDayNumber calendar."""
    def setUp(self):
        self.calendar = JulianDayNumber()

    def test_make_date(self):
        # Converting an ordinary Gregorian date yields some date object.
        vd = vanilla_date(2010, 8, 1)
        d = self.calendar.from_date(vd)
        self.assertIsNotNone(d)

    def test_first_date(self):
        # 1 Jan 1 CE corresponds to Julian Day Number 1721423.
        vd = vanilla_date(1, 1, 1)
        d = self.calendar.from_date(vd)
        self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
|
from datetime import date as vanilla_date
from calendar_testing import CalendarTest
from calexicon.calendars.other import JulianDayNumber
class TestJulianDayNumber(CalendarTest):
    """Tests for conversions into the JulianDayNumber calendar."""
    def setUp(self):
        self.calendar = JulianDayNumber()

    def test_make_date(self):
        # Converting an ordinary Gregorian date yields some date object.
        vd = vanilla_date(2010, 8, 1)
        d = self.calendar.from_date(vd)
        self.assertIsNotNone(d)

    def test_first_date(self):
        # 1 Jan 1 CE corresponds to Julian Day Number 1721423.
        vd = vanilla_date(1, 1, 1)
        d = self.calendar.from_date(vd)
        self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')

    def compare_date_and_number(self, year, month, day, number):
        # Helper: assert (year, month, day) maps to the given day number.
        vd = vanilla_date(year, month, day)
        d = self.calendar.from_date(vd)
        self.assertEqual(d.native_representation(), {'day_number': number})

    def test_other_date(self):
        self.compare_date_and_number(2013, 1, 1, 2456293)
|
Add a new test - on date to day number conversion.
|
Add a new test - on date to day number conversion.
|
Python
|
apache-2.0
|
jwg4/qual,jwg4/calexicon
|
---
+++
@@ -18,3 +18,10 @@
d = self.calendar.from_date(vd)
self.assertEqual(str(d), 'Day 1721423 (Julian Day Number)')
+ def compare_date_and_number(self, year, month, day, number):
+ vd = vanilla_date(year, month, day)
+ d = self.calendar.from_date(vd)
+ self.assertEqual(d.native_representation(), {'day_number': number})
+
+ def test_other_date(self):
+ self.compare_date_and_number(2013, 1, 1, 2456293)
|
5161d6c0023151d39fb56a85f739063205e676f4
|
nova/api/manager.py
|
nova/api/manager.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import manager
from nova.network import driver
class MetadataManager(manager.Manager):
    """Metadata Manager.

    This class manages the Metadata API service initialization. Currently, it
    just adds an iptables filter rule for the metadata service.
    """
    def __init__(self, *args, **kwargs):
        super(MetadataManager, self).__init__(*args, **kwargs)
        # Driver is chosen by configuration; it provides metadata_accept().
        self.network_driver = driver.load_network_driver()

    def init_host(self):
        """Perform any initialization.

        Currently, we only add an iptables filter rule for the metadata
        service.
        """
        self.network_driver.metadata_accept()
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import manager
from nova.network import driver
class MetadataManager(manager.Manager):
    """Metadata Manager.

    This class manages the Metadata API service initialization. Currently, it
    just adds an iptables filter rule for the metadata service.
    """
    def __init__(self, *args, **kwargs):
        super(MetadataManager, self).__init__(*args, **kwargs)
        # Driver is chosen by configuration; the iptables accept rule is
        # installed once at construction time.
        self.network_driver = driver.load_network_driver()
        self.network_driver.metadata_accept()
|
Initialize iptables rules on initialization of MetadataManager
|
Initialize iptables rules on initialization of MetadataManager
To avoid multiple initialization of iptables rules if there are a few
workers for metadata service, perform iptables configuration in
__init__() of MetadataManager.
Change-Id: I674c04f973318f06cbb98693f0a884c824af8748
Closes-Bug: #1097999
|
Python
|
apache-2.0
|
noironetworks/nova,orbitfp7/nova,eayunstack/nova,badock/nova,JioCloud/nova,CEG-FYP-OpenStack/scheduler,badock/nova,mikalstill/nova,gooddata/openstack-nova,LoHChina/nova,rahulunair/nova,jeffrey4l/nova,shahar-stratoscale/nova,vmturbo/nova,scripnichenko/nova,mandeepdhami/nova,cyx1231st/nova,felixma/nova,luogangyi/bcec-nova,leilihh/nova,vmturbo/nova,devoid/nova,Tehsmash/nova,viggates/nova,blueboxgroup/nova,akash1808/nova,alexandrucoman/vbox-nova-driver,takeshineshiro/nova,kimjaejoong/nova,ruslanloman/nova,klmitch/nova,ewindisch/nova,orbitfp7/nova,alvarolopez/nova,openstack/nova,devendermishrajio/nova_test_latest,iuliat/nova,TwinkleChawla/nova,eharney/nova,mahak/nova,watonyweng/nova,mgagne/nova,edulramirez/nova,phenoxim/nova,alvarolopez/nova,akash1808/nova,double12gzh/nova,devendermishrajio/nova,CiscoSystems/nova,NeCTAR-RC/nova,eayunstack/nova,gooddata/openstack-nova,leilihh/novaha,joker946/nova,iuliat/nova,dawnpower/nova,takeshineshiro/nova,j-carpentier/nova,yosshy/nova,bgxavier/nova,angdraug/nova,maelnor/nova,silenceli/nova,Juniper/nova,jianghuaw/nova,cernops/nova,bigswitch/nova,BeyondTheClouds/nova,leilihh/nova,Francis-Liu/animated-broccoli,yatinkumbhare/openstack-nova,ewindisch/nova,blueboxgroup/nova,BeyondTheClouds/nova,nikesh-mahalka/nova,BeyondTheClouds/nova,mikalstill/nova,barnsnake351/nova,cloudbase/nova,petrutlucian94/nova,saleemjaveds/https-github.com-openstack-nova,shail2810/nova,yosshy/nova,CloudServer/nova,cloudbase/nova-virtualbox,rajalokan/nova,tianweizhang/nova,rrader/nova-docker-plugin,vmturbo/nova,rrader/nova-docker-plugin,adelina-t/nova,Juniper/nova,viggates/nova,LoHChina/nova,angdraug/nova,rajalokan/nova,Yusuke1987/openstack_template,redhat-openstack/nova,devoid/nova,apporc/nova,klmitch/nova,ted-gould/nova,cloudbase/nova,devendermishrajio/nova_test_latest,dawnpower/nova,jeffrey4l/nova,belmiromoreira/nova,rajalokan/nova,thomasem/nova,CCI-MOC/nova,gooddata/openstack-nova,vladikr/nova_drafts,virtualopensystems/nova,projectcalico/calico-nova,Stavitsky/no
va,Tehsmash/nova,sacharya/nova,cernops/nova,affo/nova,adelina-t/nova,Juniper/nova,CloudServer/nova,silenceli/nova,varunarya10/nova_test_latest,berrange/nova,shahar-stratoscale/nova,bigswitch/nova,thomasem/nova,akash1808/nova_test_latest,fnordahl/nova,isyippee/nova,hanlind/nova,virtualopensystems/nova,alaski/nova,spring-week-topos/nova-week,MountainWei/nova,noironetworks/nova,Francis-Liu/animated-broccoli,hanlind/nova,alaski/nova,joker946/nova,vmturbo/nova,jianghuaw/nova,JianyuWang/nova,leilihh/novaha,nikesh-mahalka/nova,tangfeixiong/nova,devendermishrajio/nova,openstack/nova,akash1808/nova_test_latest,eonpatapon/nova,cloudbase/nova-virtualbox,luogangyi/bcec-nova,eharney/nova,zzicewind/nova,mahak/nova,spring-week-topos/nova-week,dims/nova,tangfeixiong/nova,tanglei528/nova,cernops/nova,Metaswitch/calico-nova,scripnichenko/nova,NeCTAR-RC/nova,vladikr/nova_drafts,varunarya10/nova_test_latest,zaina/nova,MountainWei/nova,klmitch/nova,double12gzh/nova,yatinkumbhare/openstack-nova,JioCloud/nova_test_latest,whitepages/nova,mikalstill/nova,dims/nova,mandeepdhami/nova,Juniper/nova,phenoxim/nova,zhimin711/nova,isyippee/nova,apporc/nova,alexandrucoman/vbox-nova-driver,watonyweng/nova,Metaswitch/calico-nova,OpenAcademy-OpenStack/nova-scheduler,raildo/nova,raildo/nova,fnordahl/nova,CCI-MOC/nova,TwinkleChawla/nova,barnsnake351/nova,cyx1231st/nova,ted-gould/nova,gooddata/openstack-nova,zhimin711/nova,petrutlucian94/nova_dev,kimjaejoong/nova,sebrandon1/nova,shail2810/nova,rajalokan/nova,tealover/nova,tudorvio/nova,mgagne/nova,felixma/nova,saleemjaveds/https-github.com-openstack-nova,jianghuaw/nova,projectcalico/calico-nova,maelnor/nova,sacharya/nova,mmnelemane/nova,JioCloud/nova,tianweizhang/nova,belmiromoreira/nova,sebrandon1/nova,openstack/nova,eonpatapon/nova,mahak/nova,JianyuWang/nova,CiscoSystems/nova,bgxavier/nova,OpenAcademy-OpenStack/nova-scheduler,redhat-openstack/nova,whitepages/nova,petrutlucian94/nova_dev,tealover/nova,zaina/nova,affo/nova,tudorvio/nova,rahulunair/nova,se
brandon1/nova,CEG-FYP-OpenStack/scheduler,Stavitsky/nova,klmitch/nova,edulramirez/nova,zzicewind/nova,ruslanloman/nova,jianghuaw/nova,berrange/nova,petrutlucian94/nova,JioCloud/nova_test_latest,rahulunair/nova,tanglei528/nova,hanlind/nova,j-carpentier/nova,Yusuke1987/openstack_template,cloudbase/nova,mmnelemane/nova
|
---
+++
@@ -29,11 +29,4 @@
def __init__(self, *args, **kwargs):
super(MetadataManager, self).__init__(*args, **kwargs)
self.network_driver = driver.load_network_driver()
-
- def init_host(self):
- """Perform any initialization.
-
- Currently, we only add an iptables filter rule for the metadata
- service.
- """
self.network_driver.metadata_accept()
|
17b184e5c8d41eb083dc6400f6fca2a3eeb8f742
|
core/admin/mailu/internal/views.py
|
core/admin/mailu/internal/views.py
|
from mailu import db, models, app, limiter
from mailu.internal import internal, nginx
import flask
import flask_login
@internal.route("/auth/email")
@limiter.limit(
app.config["AUTH_RATELIMIT"],
lambda: flask.request.headers["Client-Ip"]
)
def nginx_authentication():
""" Main authentication endpoint for Nginx email server
"""
headers = nginx.handle_authentication(flask.request.headers)
response = flask.Response()
for key, value in headers.items():
response.headers[key] = str(value)
return response
@internal.route("/auth/admin")
def admin_authentication():
""" Fails if the user is not an authenticated admin.
"""
if (not flask_login.current_user.is_anonymous
and flask_login.current_user.global_admin):
return ""
return flask.abort(403)
|
from mailu import db, models, app, limiter
from mailu.internal import internal, nginx
import flask
import flask_login
import base64
import urllib
@internal.route("/auth/email")
@limiter.limit(
app.config["AUTH_RATELIMIT"],
lambda: flask.request.headers["Client-Ip"]
)
def nginx_authentication():
""" Main authentication endpoint for Nginx email server
"""
headers = nginx.handle_authentication(flask.request.headers)
response = flask.Response()
for key, value in headers.items():
response.headers[key] = str(value)
return response
@internal.route("/auth/admin")
def admin_authentication():
""" Fails if the user is not an authenticated admin.
"""
if (not flask_login.current_user.is_anonymous
and flask_login.current_user.global_admin):
return ""
return flask.abort(403)
@internal.route("/auth/basic")
def basic_authentication():
""" Tries to authenticate using the Authorization header.
"""
authorization = flask.request.headers.get("Authorization")
if authorization and authorization.startswith("Basic "):
encoded = authorization.replace("Basic ", "")
user_email, password = base64.b64decode(encoded).split(b":")
user = models.User.query.get(user_email.decode("utf8"))
if user and user.check_password(password.decode("utf8")):
response = flask.Response()
response.headers["X-User"] = user.email
return response
response = flask.Response(status=401)
response.headers["WWW-Authenticate"] = 'Basic realm="Login Required"'
return response
|
Implement a basic authentication API
|
Implement a basic authentication API
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
---
+++
@@ -3,6 +3,8 @@
import flask
import flask_login
+import base64
+import urllib
@internal.route("/auth/email")
@@ -28,3 +30,21 @@
and flask_login.current_user.global_admin):
return ""
return flask.abort(403)
+
+
+@internal.route("/auth/basic")
+def basic_authentication():
+ """ Tries to authenticate using the Authorization header.
+ """
+ authorization = flask.request.headers.get("Authorization")
+ if authorization and authorization.startswith("Basic "):
+ encoded = authorization.replace("Basic ", "")
+ user_email, password = base64.b64decode(encoded).split(b":")
+ user = models.User.query.get(user_email.decode("utf8"))
+ if user and user.check_password(password.decode("utf8")):
+ response = flask.Response()
+ response.headers["X-User"] = user.email
+ return response
+ response = flask.Response(status=401)
+ response.headers["WWW-Authenticate"] = 'Basic realm="Login Required"'
+ return response
|
c7c1fa91a0ec213bd648f2f50f95f5652891d3ab
|
main/readability_graph.py
|
main/readability_graph.py
|
import graph
from corpus.mysql.reddit import RedditMySQLCorpus
import cred
if __name__ == '__main__':
    # Pull ARI readability scores from MySQL and plot their histogram.
    corpus = RedditMySQLCorpus()
    corpus.setup(**(cred.kwargs))
    result = corpus.run_sql('SELECT ari FROM comment_feature_read', None)
    print('Got results')
    # result maps row index -> row dict; collect the ARI column.
    values = [ result[i]['ari'] for i in result ]
    graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values')
|
import graph
from corpus.mysql.reddit import RedditMySQLCorpus
import cred
if __name__ == '__main__':
    # Pull readability features from MySQL and plot a histogram per measure.
    # NOTE(review): LIMIT 100 looks like a debugging cap — confirm before
    # using these plots for analysis.
    corpus = RedditMySQLCorpus()
    corpus.setup(**(cred.kwargs))
    result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None)
    print('Got results')

    # result maps row index -> row dict; one histogram per readability metric.
    values = [ result[i]['ari'] for i in result ]
    graph.hist('data/ari_hist', values, 'ARI', 'Frequency',
               'Frequency of ARI values')

    values = [ result[i]['flesch_reading_ease'] for i in result ]
    graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency',
               'Frequency of Flesch Reading Ease values')

    values = [ result[i]['flesch_kincaid_grade_level'] for i in result ]
    graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency',
               'Frequency of Flesch Kincaid Grade Level values')

    values = [ result[i]['gunning_fog_index'] for i in result ]
    graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency',
               'Frequency of Gunning Fog Index values')

    values = [ result[i]['smog_index'] for i in result ]
    graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency',
               'Frequency of Smog Index values')

    values = [ result[i]['coleman_liau_index'] for i in result ]
    graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency',
               'Frequency of Coleman Liau Index values')

    values = [ result[i]['lix'] for i in result ]
    graph.hist('data/lix_hist', values, 'LIX', 'Frequency',
               'Frequency of LIX values')

    values = [ result[i]['rix'] for i in result ]
    graph.hist('data/rix_hist', values, 'RIX', 'Frequency',
               'Frequency of RIX values')
|
Add other statistical measures for graphing
|
Add other statistical measures for graphing
|
Python
|
mit
|
worldwise001/stylometry
|
---
+++
@@ -7,7 +7,37 @@
corpus = RedditMySQLCorpus()
corpus.setup(**(cred.kwargs))
- result = corpus.run_sql('SELECT ari FROM comment_feature_read', None)
+ result = corpus.run_sql('SELECT * FROM comment_feature_read LIMIT 100', None)
print('Got results')
+
values = [ result[i]['ari'] for i in result ]
- graph.hist('data/ari_hist', values, 'ARI', 'Frequency', 'Frequency of ARI values')
+ graph.hist('data/ari_hist', values, 'ARI', 'Frequency',
+ 'Frequency of ARI values')
+
+ values = [ result[i]['flesch_reading_ease'] for i in result ]
+ graph.hist('data/flesch_reading_ease_hist', values, 'Flesch Reading Ease', 'Frequency',
+ 'Frequency of Flesch Reading Ease values')
+
+ values = [ result[i]['flesch_kincaid_grade_level'] for i in result ]
+ graph.hist('data/flesch_kincaid_grade_level_hist', values, 'Flesch Kincaid Grade Level', 'Frequency',
+ 'Frequency of Flesch Kincaid Grade Level values')
+
+ values = [ result[i]['gunning_fog_index'] for i in result ]
+ graph.hist('data/gunning_fog_index_hist', values, 'Gunning Fog Index', 'Frequency',
+ 'Frequency of Gunning Fog Index values')
+
+ values = [ result[i]['smog_index'] for i in result ]
+ graph.hist('data/smog_index_hist', values, 'Smog Index', 'Frequency',
+ 'Frequency of Smog Index values')
+
+ values = [ result[i]['coleman_liau_index'] for i in result ]
+ graph.hist('data/coleman_liau_index_hist', values, 'Coleman Liau Index', 'Frequency',
+ 'Frequency of Coleman Liau Index values')
+
+ values = [ result[i]['lix'] for i in result ]
+ graph.hist('data/lix_hist', values, 'LIX', 'Frequency',
+ 'Frequency of LIX values')
+
+ values = [ result[i]['rix'] for i in result ]
+ graph.hist('data/rix_hist', values, 'RIX', 'Frequency',
+ 'Frequency of RIX values')
|
7d988a0d622496065e3b1acbb9f4a32fa3678c9e
|
manager/manager/themes.py
|
manager/manager/themes.py
|
# Generated by generate.js. Commit this file, but do not edit it.
from manager.helpers import EnumChoice
# The version of Thema to use
version = "2.20.0"
class Themes(EnumChoice):
"""The list of Thema themes."""
bootstrap = "bootstrap"
elife = "elife"
f1000 = "f1000"
galleria = "galleria"
giga = "giga"
latex = "latex"
nature = "nature"
plos = "plos"
rpng = "rpng"
skeleton = "skeleton"
stencila = "stencila"
tufte = "tufte"
wilmore = "wilmore"
|
# Generated by generate.js. Commit this file, but do not edit it.
from manager.helpers import EnumChoice
# The version of Thema to use
version = "2.20.3"
class Themes(EnumChoice):
"""The list of Thema themes."""
bootstrap = "bootstrap"
elife = "elife"
f1000 = "f1000"
galleria = "galleria"
giga = "giga"
latex = "latex"
nature = "nature"
plos = "plos"
rpng = "rpng"
skeleton = "skeleton"
stencila = "stencila"
tufte = "tufte"
wilmore = "wilmore"
|
Update Thema version static string
|
chore(Dev): Update Thema version static string
|
Python
|
apache-2.0
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
---
+++
@@ -3,7 +3,7 @@
from manager.helpers import EnumChoice
# The version of Thema to use
-version = "2.20.0"
+version = "2.20.3"
class Themes(EnumChoice):
|
00ee7549c900d8c3bcae94141a8b8c774d943731
|
examples/new_member.py
|
examples/new_member.py
|
import discord
class MyClient(discord.Client):
async def on_ready(self):
print('Logged in as')
print(self.user.name)
print(self.user.id)
print('------')
async def on_member_join(self, member):
guild = member.guild
await guild.default_channel.send('Welcome {0.mention} to {1.name}!'.format(member, guild))
client = MyClient()
client.run('token')
|
import discord
class MyClient(discord.Client):
async def on_ready(self):
print('Logged in as')
print(self.user.name)
print(self.user.id)
print('------')
async def on_member_join(self, member):
guild = member.guild
if guild.system_channel is not None:
to_send = 'Welcome {0.mention} to {1.name}!'.format(member, guild)
await guild.system_channel.send(to_send)
client = MyClient()
client.run('token')
|
Update new member example to not be broken.
|
Update new member example to not be broken.
Took forever but better late than never.
|
Python
|
mit
|
imayhaveborkedit/discord.py,Harmon758/discord.py,Rapptz/discord.py,Harmon758/discord.py,khazhyk/discord.py,rapptz/discord.py
|
---
+++
@@ -9,7 +9,10 @@
async def on_member_join(self, member):
guild = member.guild
- await guild.default_channel.send('Welcome {0.mention} to {1.name}!'.format(member, guild))
+ if guild.system_channel is not None:
+ to_send = 'Welcome {0.mention} to {1.name}!'.format(member, guild)
+ await guild.system_channel.send(to_send)
+
client = MyClient()
client.run('token')
|
3588c52060b540f6d3ca791c7309b4e9185a60aa
|
config.py
|
config.py
|
class Config(object):
"""
Base configuration class. Contains one method that defines the database URI.
This class is to be subclassed and its attributes defined therein.
"""
def __init__(self):
self.database_uri()
def database_uri(self):
if self.DIALECT == 'sqlite':
self.DATABASE_URI = r'sqlite://{name}'.format(name=self.DBNAME)
else:
self.DATABASE_URI = r'{dialect}://{user}:{passwd}@{host}:{port}/{name}'.format(
dialect=self.DIALECT, user=self.DBUSER, passwd=self.DBPASSWD,
host=self.HOSTNAME, port=self.PORT, name=self.DBNAME
)
|
class Config(object):
"""
Base configuration class. Contains one property that defines the database URI.
This class is to be subclassed and its attributes defined therein.
"""
@property
def database_uri(self):
return r'sqlite://{name}'.format(name=self.DBNAME) if self.DIALECT == 'sqlite' else \
r'{dialect}://{user}:{passwd}@{host}:{port}/{name}'.format(
dialect=self.DIALECT, user=self.DBUSER, passwd=self.DBPASSWD,
host=self.HOSTNAME, port=self.PORT, name=self.DBNAME
)
|
Replace database_uri method with a property
|
Replace database_uri method with a property
|
Python
|
mit
|
soccermetrics/marcotti-mls
|
---
+++
@@ -1,18 +1,14 @@
class Config(object):
"""
- Base configuration class. Contains one method that defines the database URI.
+ Base configuration class. Contains one property that defines the database URI.
This class is to be subclassed and its attributes defined therein.
"""
- def __init__(self):
- self.database_uri()
-
+ @property
def database_uri(self):
- if self.DIALECT == 'sqlite':
- self.DATABASE_URI = r'sqlite://{name}'.format(name=self.DBNAME)
- else:
- self.DATABASE_URI = r'{dialect}://{user}:{passwd}@{host}:{port}/{name}'.format(
+ return r'sqlite://{name}'.format(name=self.DBNAME) if self.DIALECT == 'sqlite' else \
+ r'{dialect}://{user}:{passwd}@{host}:{port}/{name}'.format(
dialect=self.DIALECT, user=self.DBUSER, passwd=self.DBPASSWD,
host=self.HOSTNAME, port=self.PORT, name=self.DBNAME
)
|
e3f6b604c90032dc1fb9dcc9838f11aa10498dae
|
pi_approach/UI/main.py
|
pi_approach/UI/main.py
|
# Touchscreen Kivy Interface for Lidar Project
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.core.window import Window
#Window.clearcolor=(1,1,1,1)
class Init_Screen(GridLayout):
pass
class Main_Screen(GridLayout):
angle = 0
def change_value(self, *args):
value_slider = self.ids['value_slider']
self.angle = int(value_slider.value)
if self.angle == 361:
self.angle = "CONT"
value_label = self.ids['value_label']
value_label.text = "[size=10]" + str(self.angle) + "[/size]"
def scan(self, *args):
# Remember to add "if lidar/camera are on"
print self.angle
# Scan through this angle
class LidarApp(App):
def build(self):
return Main_Screen()
if __name__ == '__main__':
LidarApp().run()
|
# Touchscreen Kivy Interface for Lidar Project
import socket
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.core.window import Window
#Window.clearcolor=(1,1,1,1)
class Init_Screen(GridLayout):
pass
class Main_Screen(GridLayout):
angle = 0
def change_value(self, *args):
value_slider = self.ids["value_slider"]
self.angle = int(value_slider.value)
if self.angle == 361:
self.angle = "CONT"
value_label = self.ids['value_label']
value_label.text = "[size=10]" + str(self.angle) + "[/size]"
def scan(self, *args):
# Remember to add "if lidar/camera are on"
print self.angle
# Scan through this angle
class LidarApp(App):
def build(self):
return Main_Screen()
class Server(object):
"""A class that serves a server and nothing else"""
HOST = socket.gethostname() + ".local"
PORT = 12345
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
def setup_server(self):
try:
Server.s.bind((Server.HOST, Server.PORT))
except socket.error:
return "Bind failed"
def socket_connection(self):
Server.s.listen(5)
(connection, address) = Server.s.accept()
return (connection, address)
def receive_data(self, connection):
data = connection.recv(4096)
return data
def send_data(self, connection, data):
connection.send(data)
def close_connection(self, connection):
connection.close()
if __name__ == "__main__":
LidarApp().run()
|
Create a server class from scratch
|
Create a server class from scratch
|
Python
|
mit
|
the-raspberry-pi-guy/lidar
|
---
+++
@@ -1,5 +1,6 @@
# Touchscreen Kivy Interface for Lidar Project
+import socket
from kivy.app import App
from kivy.uix.gridlayout import GridLayout
from kivy.core.window import Window
@@ -14,7 +15,7 @@
angle = 0
def change_value(self, *args):
- value_slider = self.ids['value_slider']
+ value_slider = self.ids["value_slider"]
self.angle = int(value_slider.value)
if self.angle == 361:
self.angle = "CONT"
@@ -30,5 +31,32 @@
def build(self):
return Main_Screen()
-if __name__ == '__main__':
+class Server(object):
+ """A class that serves a server and nothing else"""
+ HOST = socket.gethostname() + ".local"
+ PORT = 12345
+ s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+
+ def setup_server(self):
+ try:
+ Server.s.bind((Server.HOST, Server.PORT))
+ except socket.error:
+ return "Bind failed"
+
+ def socket_connection(self):
+ Server.s.listen(5)
+ (connection, address) = Server.s.accept()
+ return (connection, address)
+
+ def receive_data(self, connection):
+ data = connection.recv(4096)
+ return data
+
+ def send_data(self, connection, data):
+ connection.send(data)
+
+ def close_connection(self, connection):
+ connection.close()
+
+if __name__ == "__main__":
LidarApp().run()
|
512ec31a3c022bc8a31d57bc51e4e6dac29dcf83
|
src/sentry/web/frontend/organization_api_key_settings.py
|
src/sentry/web/frontend/organization_api_key_settings.py
|
from __future__ import absolute_import
from django import forms
from django.utils.translation import ugettext_lazy as _
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.forms.fields import OriginsField
from sentry.web.frontend.base import OrganizationView
class ApiKeyForm(forms.ModelForm):
allowed_origins = OriginsField(label=_('Allowed Domains'), required=False,
help_text=_('Separate multiple entries with a newline.'))
class Meta:
model = ApiKey
fields = ('label', 'scopes', 'allowed_origins')
class OrganizationApiKeySettingsView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization, key_id):
key = ApiKey.objects.get(organization=organization, id=key_id)
form = ApiKeyForm(request.POST or None, instance=key)
context = {
'key': key,
'form': form,
}
return self.respond('sentry/organization-api-key-settings.html', context)
|
from __future__ import absolute_import
from django import forms
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from sentry.models import ApiKey, OrganizationMemberType
from sentry.web.forms.fields import OriginsField
from sentry.web.frontend.base import OrganizationView
class ApiKeyForm(forms.ModelForm):
allowed_origins = OriginsField(label=_('Allowed Domains'), required=False,
help_text=_('Separate multiple entries with a newline.'))
class Meta:
model = ApiKey
fields = ('label', 'scopes', 'allowed_origins')
class OrganizationApiKeySettingsView(OrganizationView):
required_access = OrganizationMemberType.ADMIN
def handle(self, request, organization, key_id):
key = ApiKey.objects.get(organization=organization, id=key_id)
form = ApiKeyForm(request.POST or None, instance=key)
if form.is_valid():
key.save()
messages.add_message(
request, messages.SUCCESS,
'Your settings were saved.',
)
return HttpResponseRedirect(request.path)
context = {
'key': key,
'form': form,
}
return self.respond('sentry/organization-api-key-settings.html', context)
|
Allow key settings to be saved
|
Allow key settings to be saved
|
Python
|
bsd-3-clause
|
hongliang5623/sentry,TedaLIEz/sentry,gg7/sentry,ifduyue/sentry,wujuguang/sentry,boneyao/sentry,fuziontech/sentry,fuziontech/sentry,pauloschilling/sentry,mvaled/sentry,JackDanger/sentry,imankulov/sentry,kevinlondon/sentry,kevinlondon/sentry,vperron/sentry,looker/sentry,Natim/sentry,looker/sentry,beeftornado/sentry,Kryz/sentry,llonchj/sentry,ngonzalvez/sentry,1tush/sentry,1tush/sentry,looker/sentry,mvaled/sentry,zenefits/sentry,drcapulet/sentry,hongliang5623/sentry,llonchj/sentry,JackDanger/sentry,ifduyue/sentry,korealerts1/sentry,mitsuhiko/sentry,zenefits/sentry,argonemyth/sentry,BuildingLink/sentry,gencer/sentry,zenefits/sentry,ngonzalvez/sentry,JamesMura/sentry,mvaled/sentry,drcapulet/sentry,argonemyth/sentry,ewdurbin/sentry,ifduyue/sentry,ngonzalvez/sentry,felixbuenemann/sentry,jean/sentry,ifduyue/sentry,JTCunning/sentry,kevinastone/sentry,JamesMura/sentry,gencer/sentry,JamesMura/sentry,alexm92/sentry,TedaLIEz/sentry,kevinlondon/sentry,nicholasserra/sentry,BuildingLink/sentry,argonemyth/sentry,daevaorn/sentry,mvaled/sentry,songyi199111/sentry,BayanGroup/sentry,korealerts1/sentry,wong2/sentry,fotinakis/sentry,JTCunning/sentry,1tush/sentry,llonchj/sentry,kevinastone/sentry,Kryz/sentry,beeftornado/sentry,BuildingLink/sentry,JamesMura/sentry,fotinakis/sentry,vperron/sentry,fotinakis/sentry,felixbuenemann/sentry,imankulov/sentry,mvaled/sentry,wong2/sentry,wong2/sentry,boneyao/sentry,pauloschilling/sentry,beeftornado/sentry,alexm92/sentry,daevaorn/sentry,jean/sentry,BuildingLink/sentry,Natim/sentry,daevaorn/sentry,zenefits/sentry,wujuguang/sentry,fuziontech/sentry,mvaled/sentry,hongliang5623/sentry,fotinakis/sentry,drcapulet/sentry,looker/sentry,JTCunning/sentry,jean/sentry,BayanGroup/sentry,vperron/sentry,jean/sentry,JackDanger/sentry,BayanGroup/sentry,boneyao/sentry,gencer/sentry,alexm92/sentry,looker/sentry,ifduyue/sentry,ewdurbin/sentry,daevaorn/sentry,gencer/sentry,JamesMura/sentry,wujuguang/sentry,imankulov/sentry,nicholasserra/sentry,Kryz/sentry,gg7/sentry,gencer
/sentry,jean/sentry,BuildingLink/sentry,Natim/sentry,TedaLIEz/sentry,pauloschilling/sentry,nicholasserra/sentry,ewdurbin/sentry,songyi199111/sentry,mitsuhiko/sentry,felixbuenemann/sentry,gg7/sentry,songyi199111/sentry,korealerts1/sentry,kevinastone/sentry,zenefits/sentry
|
---
+++
@@ -1,6 +1,8 @@
from __future__ import absolute_import
from django import forms
+from django.contrib import messages
+from django.http import HttpResponseRedirect
from django.utils.translation import ugettext_lazy as _
from sentry.models import ApiKey, OrganizationMemberType
@@ -24,6 +26,13 @@
key = ApiKey.objects.get(organization=organization, id=key_id)
form = ApiKeyForm(request.POST or None, instance=key)
+ if form.is_valid():
+ key.save()
+ messages.add_message(
+ request, messages.SUCCESS,
+ 'Your settings were saved.',
+ )
+ return HttpResponseRedirect(request.path)
context = {
'key': key,
|
91835ee095aaa36cb44ed0bca691045678a32703
|
wrapper_functions.py
|
wrapper_functions.py
|
"""
Doc string here.
@author mje
@email: mads [] cnru.dk
"""
import sys
import subprocess
cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis"
subjects = ["0004", "0005", "0006", "0007", "0008", "0009", "0010", "0011",
"0012", "0013", "0014", "0015", "0016", "0017", "0020", "0021",
"0022", "0023", "0024", "0025"] # subject to run
for subject in subjects:
submit_cmd = "python %s %s" % (sys.argv[1], subject)
subprocess.call([cmd, "3", submit_cmd])
|
"""
Doc string here.
@author mje
@email: mads [] cnru.dk
"""
import sys
import subprocess
cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis"
subjects = ["0004", "0005", "0006", "0007", "0008", "0009", "0010", "0011",
"0012", "0013", "0014", "0015", "0016", "0017", "0020", "0021",
"0022", "0023", "0024", "0025"] # subject to run
for subject in subjects:
submit_cmd = "python %s %s" % (sys.argv[1], subject)
subprocess.call([cmd, "1", submit_cmd])
|
Change number of cores for submit
|
Change number of cores for submit
|
Python
|
mit
|
MadsJensen/malthe_alpha_project,MadsJensen/malthe_alpha_project
|
---
+++
@@ -16,4 +16,4 @@
for subject in subjects:
submit_cmd = "python %s %s" % (sys.argv[1], subject)
- subprocess.call([cmd, "3", submit_cmd])
+ subprocess.call([cmd, "1", submit_cmd])
|
ecea45689fdebf409b06311cc8e84b2e609967e1
|
manila/tests/fake_driver.py
|
manila/tests/fake_driver.py
|
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from manila.openstack.common import log as logging
from manila.share.drivers import generic
LOG = logging.getLogger(__name__)
class FakeShareDriver(generic.GenericShareDriver):
"""Logs calls instead of executing."""
def __init__(self, *args, **kwargs):
super(FakeShareDriver, self).__init__(execute=self.fake_execute,
*args, **kwargs)
def do_setup(self, context):
"""Fake setup of Generic driver."""
pass
def check_for_setup_error(self):
"""No setup necessary in fake mode."""
pass
@staticmethod
def fake_execute(cmd, *_args, **_kwargs):
"""Execute that simply logs the command."""
LOG.debug("FAKE EXECUTE: %s", cmd)
return (None, None)
|
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from manila.openstack.common import log as logging
from manila.share.drivers import generic
LOG = logging.getLogger(__name__)
class FakeShareDriver(generic.GenericShareDriver):
"""Logs calls instead of executing."""
def __init__(self, *args, **kwargs):
super(FakeShareDriver, self).__init__(execute=self.fake_execute,
*args, **kwargs)
def do_setup(self, context):
"""Fake setup of Generic driver."""
pass
def check_for_setup_error(self):
"""No setup necessary in fake mode."""
pass
def ensure_share(self, *args, **kwargs):
"""Fake ensure_share for fake mode."""
pass
@staticmethod
def fake_execute(cmd, *_args, **_kwargs):
"""Execute that simply logs the command."""
LOG.debug("FAKE EXECUTE: %s", cmd)
return (None, None)
|
Fix failing of unittests in one thread
|
Fix failing of unittests in one thread
With change of default driver, that is used for unittests one additional
method 'ensure_share' was not mocked, that caused problems running
unittests in one thread.
Change-Id: Iea08207bb6200c37f5608d222dccdfbdc509b234
Closes-Bug: #1358854
|
Python
|
apache-2.0
|
vponomaryov/manila,weiting-chen/manila,jcsp/manila,openstack/manila,sajuptpm/manila,sajuptpm/manila,scality/manila,bswartz/manila,NetApp/manila,scality/manila,vponomaryov/manila,jcsp/manila,redhat-openstack/manila,redhat-openstack/manila,openstack/manila,NetApp/manila,weiting-chen/manila,bswartz/manila
|
---
+++
@@ -32,6 +32,10 @@
"""No setup necessary in fake mode."""
pass
+ def ensure_share(self, *args, **kwargs):
+ """Fake ensure_share for fake mode."""
+ pass
+
@staticmethod
def fake_execute(cmd, *_args, **_kwargs):
"""Execute that simply logs the command."""
|
fd4c7e3af81a4a37462dfcd7c3ac4eb43bdafcb2
|
crmapp/subscribers/models.py
|
crmapp/subscribers/models.py
|
from django.db import models
from django.contrib.auth.models import User
class Subscriber(models.Model):
user_rec = models.ForeignKey(User)
address_one = models.CharField(max_length=100)
address_two = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=50)
state = models.CharField(max_length=2)
stripe_id = models.CharField(max_length=30, blank=True)
class Meta:
verbose_name_plural = 'subscribers'
def __unicode__(self):
return u"%s's Subscription Info" % self.user_rec
|
from django.db import models
from django.contrib.auth.models import User
from django.conf import settings
import stripe
class Subscriber(models.Model):
user_rec = models.ForeignKey(User)
address_one = models.CharField(max_length=100)
address_two = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=50)
state = models.CharField(max_length=2)
stripe_id = models.CharField(max_length=30, blank=True)
class Meta:
verbose_name_plural = 'subscribers'
def __unicode__(self):
return u"%s's Subscription Info" % self.user_rec
def charge(self, request, email, fee):
# Set your secret key: remember to change this to your live secret key
# in production. See your keys here https://manage.stripe.com/account
stripe.api_key = settings.STRIPE_SECRET_KEY
# Get the credit card details submitted by the form
token = request.POST['stripeToken']
# Create a Customer
stripe_customer = stripe.Customer.create(
card=token,
description=email
)
# Save the Stripe ID to the customer's profile
self.stripe_id = stripe_customer.id
self.save()
# Charge the Customer instead of the card
stripe.Charge.create(
amount=fee, # in cents
currency="usd",
customer=stripe_customer.id
)
return stripe_customer
|
Create the Subscriber Form - Part III > Create Stripe Processing Code
|
Create the Subscriber Form - Part III > Create Stripe Processing Code
|
Python
|
mit
|
deenaariff/Django,tabdon/crmeasyapp,tabdon/crmeasyapp
|
---
+++
@@ -1,5 +1,9 @@
from django.db import models
from django.contrib.auth.models import User
+from django.conf import settings
+
+import stripe
+
class Subscriber(models.Model):
user_rec = models.ForeignKey(User)
@@ -14,3 +18,30 @@
def __unicode__(self):
return u"%s's Subscription Info" % self.user_rec
+
+ def charge(self, request, email, fee):
+ # Set your secret key: remember to change this to your live secret key
+ # in production. See your keys here https://manage.stripe.com/account
+ stripe.api_key = settings.STRIPE_SECRET_KEY
+
+ # Get the credit card details submitted by the form
+ token = request.POST['stripeToken']
+
+ # Create a Customer
+ stripe_customer = stripe.Customer.create(
+ card=token,
+ description=email
+ )
+
+ # Save the Stripe ID to the customer's profile
+ self.stripe_id = stripe_customer.id
+ self.save()
+
+ # Charge the Customer instead of the card
+ stripe.Charge.create(
+ amount=fee, # in cents
+ currency="usd",
+ customer=stripe_customer.id
+ )
+
+ return stripe_customer
|
25a95d34fcfa9447302ec399affdee14e0362cd7
|
write_graphs.py
|
write_graphs.py
|
"""Save graphical representations of all the lyman workflows."""
import os
import re
from glob import glob
from lyman import workflows as wf
from nipype import config
def main():
config.set('logging', 'workflow_level', 'CRITICAL')
# Find the functions that create workflows
wf_funcs = [k for k in dir(wf) if re.match("create_.*_workflow", k)]
for func in wf_funcs:
try:
out = getattr(wf, func)()
except:
print "ERROR: call to %s failed" % func
# Some of the workflow functions return (flow, inputs, outputs)
try:
flow, _, _ = out
except TypeError:
flow = out
# Write the graphs
name = flow.name
flow.write_graph("graphs/%s.dot" % name, "orig")
# Remove the .dot files as they are not of use to us
files = glob("graphs/*")
for f in files:
if f.endswith(".dot"):
os.remove(f)
if __name__ == "__main__":
main()
|
"""Save graphical representations of all the lyman workflows."""
import os
import re
import sys
from glob import glob
from lyman import workflows as wf
from nipype import config
def main(arglist):
config.set('logging', 'workflow_level', 'CRITICAL')
# Find the functions that create workflows
wf_funcs = [k for k in dir(wf) if re.match("create_.*_workflow", k)]
for func in wf_funcs:
try:
out = getattr(wf, func)()
except:
print "ERROR: call to %s failed" % func
# Some of the workflow functions return (flow, inputs, outputs)
try:
flow, _, _ = out
except TypeError:
flow = out
# Write the graphs
name = flow.name
if arglist:
if name in arglist:
flow.write_graph("graphs/%s.dot" % name, "orig")
else:
flow.write_graph("graphs/%s.dot" % name, "orig")
# Remove the .dot files as they are not of use to us
files = glob("graphs/*")
for f in files:
if f.endswith(".dot"):
os.remove(f)
if __name__ == "__main__":
main(sys.argv[1:])
|
Allow for writing specific graph images
|
Allow for writing specific graph images
|
Python
|
bsd-3-clause
|
tuqc/lyman,mwaskom/lyman,kastman/lyman
|
---
+++
@@ -1,12 +1,13 @@
"""Save graphical representations of all the lyman workflows."""
import os
import re
+import sys
from glob import glob
from lyman import workflows as wf
from nipype import config
-def main():
+def main(arglist):
config.set('logging', 'workflow_level', 'CRITICAL')
@@ -27,7 +28,11 @@
# Write the graphs
name = flow.name
- flow.write_graph("graphs/%s.dot" % name, "orig")
+ if arglist:
+ if name in arglist:
+ flow.write_graph("graphs/%s.dot" % name, "orig")
+ else:
+ flow.write_graph("graphs/%s.dot" % name, "orig")
# Remove the .dot files as they are not of use to us
files = glob("graphs/*")
@@ -37,4 +42,4 @@
if __name__ == "__main__":
- main()
+ main(sys.argv[1:])
|
89971ece16ee1c062a8a54fa5cd83c473628c2ba
|
pyanyapi/helpers.py
|
pyanyapi/helpers.py
|
# coding: utf-8
"""
Functions to dynamically attach attributes to classes.
Most of parsing result is cached because of immutability of input data.
"""
class cached_property(object):
"""
Copied from Django.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type=None):
if instance is None:
return self
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
def attach_attribute(target, name, attr):
attr.__name__ = name
setattr(target, name, attr)
def attach_cached_property(target, name, prop):
method = cached_property(prop)
attach_attribute(target, name, method)
|
# coding: utf-8
"""
Functions to dynamically attach attributes to classes.
Most of parsing result is cached because of immutability of input data.
"""
class cached_property(object):
"""
Copied from Django.
"""
def __init__(self, func):
self.func = func
def __get__(self, instance, type=None):
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
def attach_attribute(target, name, attr):
attr.__name__ = name
setattr(target, name, attr)
def attach_cached_property(target, name, prop):
method = cached_property(prop)
attach_attribute(target, name, method)
|
Remove unused line from cached_property
|
Remove unused line from cached_property
|
Python
|
mit
|
gorlemik/pyanyapi,Stranger6667/pyanyapi
|
---
+++
@@ -13,8 +13,6 @@
self.func = func
def __get__(self, instance, type=None):
- if instance is None:
- return self
res = instance.__dict__[self.func.__name__] = self.func(instance)
return res
|
fd0c368d6527c1a20e904ff911238d4e75811e4f
|
pykeg/core/tests.py
|
pykeg/core/tests.py
|
"""Builds a test suite for all tests in the 'core' directory.
The django-admin command `tests` looks for a tests.py file and expects a suite()
routine to return a unittest.TestSuite.
"""
import unittest
import KegbotJsonServer_unittest
import kegbot_unittest
import models_unittest
import StateMachine_unittest
import units_unittest
import util_unittest
from pykeg.core.Devices import Net_unittest
ALL_TEST_MODULES = (
models_unittest,
StateMachine_unittest,
units_unittest,
util_unittest,
Net_unittest,
KegbotJsonServer_unittest,
kegbot_unittest,
)
def suite():
suite = unittest.TestSuite()
for module in ALL_TEST_MODULES:
suite.addTests(unittest.TestLoader().loadTestsFromModule(module))
return suite
|
# Copyright 2009 Mike Wakerly <opensource@hoho.com>
#
# This file is part of the Pykeg package of the Kegbot project.
# For more information on Pykeg or Kegbot, see http://kegbot.org/
#
# Pykeg is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Pykeg is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Pykeg. If not, see <http://www.gnu.org/licenses/>.
"""Builds a test suite for all tests in the 'core' directory.
The django-admin command `tests` looks for a tests.py file and expects a suite()
routine to return a unittest.TestSuite.
"""
import unittest
from pykeg.core import alarm_unittest
from pykeg.core import kegbot_unittest
from pykeg.core import models_unittest
from pykeg.core import StateMachine_unittest
from pykeg.core import units_unittest
from pykeg.core import util_unittest
from pykeg.core.net import kegnet_unittest
ALL_TEST_MODULES = (
alarm_unittest,
models_unittest,
StateMachine_unittest,
units_unittest,
util_unittest,
kegnet_unittest,
kegbot_unittest,
)
def suite():
suite = unittest.TestSuite()
for module in ALL_TEST_MODULES:
suite.addTests(unittest.TestLoader().loadTestsFromModule(module))
return suite
|
Update unittest suite for new modules.
|
Update unittest suite for new modules.
|
Python
|
mit
|
Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server
|
---
+++
@@ -1,26 +1,44 @@
+# Copyright 2009 Mike Wakerly <opensource@hoho.com>
+#
+# This file is part of the Pykeg package of the Kegbot project.
+# For more information on Pykeg or Kegbot, see http://kegbot.org/
+#
+# Pykeg is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 2 of the License, or
+# (at your option) any later version.
+#
+# Pykeg is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Pykeg. If not, see <http://www.gnu.org/licenses/>.
+
"""Builds a test suite for all tests in the 'core' directory.
The django-admin command `tests` looks for a tests.py file and expects a suite()
routine to return a unittest.TestSuite.
"""
+
import unittest
-import KegbotJsonServer_unittest
-import kegbot_unittest
-import models_unittest
-import StateMachine_unittest
-import units_unittest
-import util_unittest
-
-from pykeg.core.Devices import Net_unittest
+from pykeg.core import alarm_unittest
+from pykeg.core import kegbot_unittest
+from pykeg.core import models_unittest
+from pykeg.core import StateMachine_unittest
+from pykeg.core import units_unittest
+from pykeg.core import util_unittest
+from pykeg.core.net import kegnet_unittest
ALL_TEST_MODULES = (
+ alarm_unittest,
models_unittest,
StateMachine_unittest,
units_unittest,
util_unittest,
- Net_unittest,
- KegbotJsonServer_unittest,
+ kegnet_unittest,
kegbot_unittest,
)
|
3301e7101cb73674047613ef8a20c16cd2d504da
|
mots_vides/tests/factory.py
|
mots_vides/tests/factory.py
|
"""
Tests for StopWordFactory
"""
import os
from unittest import TestCase
from mots_vides.stop_words import StopWord
from mots_vides.factory import StopWordFactory
class StopWordFactoryTestCase(TestCase):
def setUp(self):
self.data_directory = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/')
self.factory = StopWordFactory(self.data_directory)
def test_get_stopwords(self):
sw = self.factory.get_stop_words('klingon')
self.assertTrue(isinstance(sw, StopWord))
self.assertEqual(list(sw.collection),
['nuq', "HIja'", "ghobe'", 'naDev'])
def test_get_collection_filename(self):
filename = self.factory.get_collection_filename('foo')
self.assertTrue(filename.endswith('foo.txt'))
self.assertTrue(filename.startswith(self.data_directory))
|
"""
Tests for StopWordFactory
"""
import os
from unittest import TestCase
from mots_vides.stop_words import StopWord
from mots_vides.factory import StopWordFactory
class StopWordFactoryTestCase(TestCase):
def setUp(self):
self.data_directory = os.path.join(
os.path.dirname(
os.path.abspath(__file__)),
'datas/')
self.factory = StopWordFactory(self.data_directory)
def test_get_stopwords(self):
sw = self.factory.get_stop_words('klingon')
self.assertTrue(isinstance(sw, StopWord))
self.assertEqual(list(sw.collection),
['nuq', "HIja'", "ghobe'", 'naDev'])
def test_get_stopwords_cache(self):
pass
def test_get_stopwords_shortcuts(self):
pass
def test_get_stopwords_fail_safe(self):
pass
def test_available_languages(self):
self.assertEqual(self.factory.available_languages,
['klingon', 'sindarin'])
self.factory.data_directory = '/brutal/change/'
self.assertEqual(self.factory.available_languages,
['klingon', 'sindarin'])
def test_get_collection_filename(self):
filename = self.factory.get_collection_filename('foo')
self.assertTrue(filename.endswith('foo.txt'))
self.assertTrue(filename.startswith(self.data_directory))
def test_read_collection(self):
pass
def test_write_collection(self):
pass
|
Complete the list of expected tests
|
Complete the list of expected tests
|
Python
|
bsd-3-clause
|
Fantomas42/mots-vides,Fantomas42/mots-vides
|
---
+++
@@ -24,7 +24,29 @@
self.assertEqual(list(sw.collection),
['nuq', "HIja'", "ghobe'", 'naDev'])
+ def test_get_stopwords_cache(self):
+ pass
+
+ def test_get_stopwords_shortcuts(self):
+ pass
+
+ def test_get_stopwords_fail_safe(self):
+ pass
+
+ def test_available_languages(self):
+ self.assertEqual(self.factory.available_languages,
+ ['klingon', 'sindarin'])
+ self.factory.data_directory = '/brutal/change/'
+ self.assertEqual(self.factory.available_languages,
+ ['klingon', 'sindarin'])
+
def test_get_collection_filename(self):
filename = self.factory.get_collection_filename('foo')
self.assertTrue(filename.endswith('foo.txt'))
self.assertTrue(filename.startswith(self.data_directory))
+
+ def test_read_collection(self):
+ pass
+
+ def test_write_collection(self):
+ pass
|
c0d84ec83dcd62f556cbd236abd40d54e15b1008
|
fabric/__init__.py
|
fabric/__init__.py
|
from context_managers import warnings_only
from decorators import hosts, roles, runs_once
from operations import require, prompt, put, get, run, sudo, local
from state import env
from utils import abort, warn
|
from context_managers import warnings_only
from decorators import hosts, roles, runs_once
from operations import require, prompt, put, get, run, sudo, local
from state import env
from utils import abort, warn
from version import get_version
|
Update public API to include get_version() so setup.py doesn't die.
|
Update public API to include get_version() so setup.py doesn't die.
Thanks to Curt Micol.
|
Python
|
bsd-2-clause
|
xLegoz/fabric,pashinin/fabric,tekapo/fabric,pgroudas/fabric,bitmonk/fabric,jaraco/fabric,amaniak/fabric,cmattoon/fabric,askulkarni2/fabric,StackStorm/fabric,haridsv/fabric,TarasRudnyk/fabric,ericholscher/fabric,ploxiln/fabric,kxxoling/fabric,fernandezcuesta/fabric,rbramwell/fabric,hrubi/fabric,raimon49/fabric,bspink/fabric,opavader/fabric,SamuelMarks/fabric,itoed/fabric,getsentry/fabric,sdelements/fabric,bitprophet/fabric,MjAbuz/fabric,akaariai/fabric,rodrigc/fabric,mathiasertl/fabric,elijah513/fabric,kmonsoor/fabric,likesxuqiang/fabric,simon-engledew/fabric,cgvarela/fabric,felix-d/fabric,qinrong/fabric,rane-hs/fabric-py3,tolbkni/fabric
|
---
+++
@@ -3,3 +3,4 @@
from operations import require, prompt, put, get, run, sudo, local
from state import env
from utils import abort, warn
+from version import get_version
|
59c9b0a3914920c19c9ccdbf5d77e4ce990d7d58
|
rdmo/core/models.py
|
rdmo/core/models.py
|
from django.db import models
from django.utils.timezone import now
from django.utils.translation import get_language
from django.utils.translation import ugettext_lazy as _
from rdmo.core.utils import get_languages
class Model(models.Model):
created = models.DateTimeField(editable=False, verbose_name=_('created'))
updated = models.DateTimeField(editable=False, verbose_name=_('updated'))
class Meta:
abstract = True
def save(self, *args, **kwargs):
if self.created is None:
self.created = now()
self.updated = now()
super(Model, self).save(*args, **kwargs)
class TranslationMixin(object):
def trans(self, field):
current_language = get_language()
languages = get_languages()
for lang_code, lang_string, lang_field in languages:
if lang_code == current_language:
return getattr(self, '%s_%s' % (field, lang_field))
r = ''
for i in range(1, 6):
try:
r = getattr(self, '%s_%s' % (field, 'lang' + str(i)))
except AttributeError:
pass
else:
if r != '':
return r
primary_lang_field = languages[0][2]
return getattr(self, '%s_%s' % (field, primary_lang_field)) or ''
|
from django.db import models
from django.utils.timezone import now
from django.utils.translation import get_language
from django.utils.translation import ugettext_lazy as _
from rdmo.core.utils import get_languages
class Model(models.Model):
created = models.DateTimeField(editable=False, verbose_name=_('created'))
updated = models.DateTimeField(editable=False, verbose_name=_('updated'))
class Meta:
abstract = True
def save(self, *args, **kwargs):
if self.created is None:
self.created = now()
self.updated = now()
super(Model, self).save(*args, **kwargs)
class TranslationMixin(object):
def trans(self, field):
current_language = get_language()
languages = get_languages()
for lang_code, lang_string, lang_field in languages:
if lang_code == current_language:
return getattr(self, '%s_%s' % (field, lang_field))
r = ''
for i in range(1, 6):
try:
r = getattr(self, '%s_%s' % (field, 'lang' + str(i)))
except AttributeError:
pass
else:
if r != '':
return r
return r
primary_lang_field = languages[0][2]
return getattr(self, '%s_%s' % (field, primary_lang_field)) or ''
|
Add value to return if nothing is found
|
Add value to return if nothing is found
|
Python
|
apache-2.0
|
rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug,DMPwerkzeug/DMPwerkzeug,rdmorganiser/rdmo,DMPwerkzeug/DMPwerkzeug,rdmorganiser/rdmo
|
---
+++
@@ -41,6 +41,7 @@
else:
if r != '':
return r
+ return r
primary_lang_field = languages[0][2]
return getattr(self, '%s_%s' % (field, primary_lang_field)) or ''
|
91de92361ae02d796da4ef6b58ac8e63ca437614
|
dataviva/apps/partners/models.py
|
dataviva/apps/partners/models.py
|
from dataviva import db
class Call(db.Model):
__tablename__ = 'partner_call'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(400))
link = db.Column(db.String(250))
|
from dataviva import db
class Call(db.Model):
__tablename__ = 'partner_call'
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(400))
link = db.Column(db.String(250))
active = db.Column(db.Integer)
|
Add colunm active in calls tables.
|
Add colunm active in calls tables.
|
Python
|
mit
|
DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site
|
---
+++
@@ -5,4 +5,5 @@
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(400))
link = db.Column(db.String(250))
+ active = db.Column(db.Integer)
|
881d41b2fc465d018a1247419b6a2487c71b88b4
|
pft/tests/test_basics.py
|
pft/tests/test_basics.py
|
"""Basic Unit Tests."""
import unittest
from flask import current_app
from .. import create_app
from ..database import db
class BasicsTestCase(unittest.TestCase):
"""Basic Test Case."""
def setUp(self):
"""Set up tests."""
self.app = create_app('testing')
self.app_context = self.app.app_context()
self.app_context.push()
db.create_all()
def tearDown(self):
"""Clean up after tests."""
db.session.remove()
db.drop_all()
self.app_context.pop()
def test_app_exists(self):
"""Test app exists."""
self.assertFalse(current_app is None)
def test_app_is_testing(self):
"""Test app is testing."""
self.assertTrue(current_app.config['TESTING'])
|
"""Basic Unit Tests."""
import pytest
from flask import current_app
from .. import create_app
from ..database import db
@pytest.fixture(autouse=True)
def initialise_testing_db():
"""Create database before testing, delete after."""
app = create_app('testing')
app_context = app.app_context()
app_context.push()
db.create_all()
yield
db.session.remove()
db.drop_all()
app_context.pop()
def test_app_exists():
"""Test app exists."""
assert current_app is not None
def test_app_is_testing():
"""Test app is testing."""
assert current_app.config['TESTING']
|
Convert basic tests to pytest
|
Convert basic tests to pytest
|
Python
|
unknown
|
gregcowell/BAM,gregcowell/PFT,gregcowell/PFT,gregcowell/BAM
|
---
+++
@@ -1,30 +1,28 @@
"""Basic Unit Tests."""
-import unittest
+import pytest
from flask import current_app
from .. import create_app
from ..database import db
-class BasicsTestCase(unittest.TestCase):
- """Basic Test Case."""
+@pytest.fixture(autouse=True)
+def initialise_testing_db():
+ """Create database before testing, delete after."""
+ app = create_app('testing')
+ app_context = app.app_context()
+ app_context.push()
+ db.create_all()
+ yield
+ db.session.remove()
+ db.drop_all()
+ app_context.pop()
- def setUp(self):
- """Set up tests."""
- self.app = create_app('testing')
- self.app_context = self.app.app_context()
- self.app_context.push()
- db.create_all()
- def tearDown(self):
- """Clean up after tests."""
- db.session.remove()
- db.drop_all()
- self.app_context.pop()
+def test_app_exists():
+ """Test app exists."""
+ assert current_app is not None
- def test_app_exists(self):
- """Test app exists."""
- self.assertFalse(current_app is None)
- def test_app_is_testing(self):
- """Test app is testing."""
- self.assertTrue(current_app.config['TESTING'])
+def test_app_is_testing():
+ """Test app is testing."""
+ assert current_app.config['TESTING']
|
550c9754f3e3a93c5ece36befb3ecbcffc6a3d2b
|
helper.py
|
helper.py
|
from fabric.api import hide, settings
from fabric.colors import blue
from contextlib import contextmanager
def header(txt):
"""Decorate a string to make it stand out as a header. """
wrapper = "------------------------------------------------------"
return blue(wrapper + "\n" + txt + "\n" + wrapper, bold=True)
@contextmanager
def mute():
"""Run a fabric command without reporting any responses to the user. """
with settings(warn_only='true'):
with hide('running', 'stdout', 'stderr', 'warnings'):
yield
def check_true(string):
""" Check if an English string seems to contain truth.
Return a boolean
Default to returning a False value unless truth is found.
"""
string = string.lower()
if string in ['true', 'yes', '1', 'yep', 'yeah']:
return True
else:
return False
|
from fabric.api import hide, settings
from fabric.colors import blue
from contextlib import contextmanager
def header(txt):
"""Decorate a string to make it stand out as a header. """
wrapper = "------------------------------------------------------"
return blue(wrapper + "\n" + txt + "\n" + wrapper, bold=True)
@contextmanager
def mute():
"""Run a fabric command without reporting any responses to the user. """
with settings(warn_only='true'):
with hide('running', 'stdout', 'stderr', 'warnings'):
yield
def check_true(string):
""" Check if an English string seems to contain truth.
Return a boolean
Default to returning a False value unless truth is found.
"""
string = string.lower()
if string in ['true', 'yes', 'y', '1', 'yep', 'yeah']:
return True
else:
return False
|
Add y to the check true function
|
Add y to the check true function
This is an obvious omision.
|
Python
|
mit
|
scimusmn/transit
|
---
+++
@@ -25,7 +25,7 @@
Default to returning a False value unless truth is found.
"""
string = string.lower()
- if string in ['true', 'yes', '1', 'yep', 'yeah']:
+ if string in ['true', 'yes', 'y', '1', 'yep', 'yeah']:
return True
else:
return False
|
f2abc1d265d7eed57223a14009900db7e622d7f6
|
simpleflow/swf/process/worker/dispatch/dynamic_dispatcher.py
|
simpleflow/swf/process/worker/dispatch/dynamic_dispatcher.py
|
# -*- coding: utf-8 -*-
import importlib
from simpleflow.activity import Activity
from .exceptions import DispatchError
class Dispatcher(object):
"""
Dispatch by name, like simpleflow.swf.process.worker.dispatch.by_module.ModuleDispatcher
but without a hierarchy.
"""
@staticmethod
def dispatch_activity(name):
"""
:param name:
:type name: str
:return:
:rtype: Activity
:raise DispatchError: if doesn't exist or not an activity
"""
module_name, activity_name = name.rsplit('.', 1)
module = importlib.import_module(module_name)
activity = getattr(module, activity_name, None)
if not isinstance(activity, Activity):
raise DispatchError()
return activity
|
# -*- coding: utf-8 -*-
import importlib
from simpleflow.activity import Activity
from .exceptions import DispatchError
class Dispatcher(object):
"""
Dispatch by name, like simpleflow.swf.process.worker.dispatch.by_module.ModuleDispatcher
but without a hierarchy.
"""
@staticmethod
def dispatch_activity(name):
"""
:param name:
:type name: str
:return:
:rtype: Activity
:raise DispatchError: if doesn't exist or not an activity
"""
module_name, activity_name = name.rsplit('.', 1)
module = importlib.import_module(module_name)
activity = getattr(module, activity_name, None)
if not isinstance(activity, Activity):
raise DispatchError("dispatching '{}' resulted in: {}".format(name, activity))
return activity
|
Improve error message on DispatchError's
|
Improve error message on DispatchError's
|
Python
|
mit
|
botify-labs/simpleflow,botify-labs/simpleflow
|
---
+++
@@ -25,5 +25,5 @@
module = importlib.import_module(module_name)
activity = getattr(module, activity_name, None)
if not isinstance(activity, Activity):
- raise DispatchError()
+ raise DispatchError("dispatching '{}' resulted in: {}".format(name, activity))
return activity
|
b4207380bc5b8639397e3d1d9a4b70069ef8d6e7
|
hydrachain/__init__.py
|
hydrachain/__init__.py
|
# -*- coding: utf-8 -*-
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
import subprocess
import re
GIT_DESCRIBE_RE = re.compile('^(?P<version>v\d+\.\d+\.\d+)-(?P<git>\d+-g[a-fA-F0-9]+(?:-dirty)?)$')
__version__ = None
try:
_dist = get_distribution('hydrachain')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'hydrachain')):
# not installed, but there is another version that *is*
raise DistributionNotFound
__version__ = _dist.version
except DistributionNotFound:
pass
if not __version__:
try:
rev = subprocess.check_output(['git', 'describe', '--dirty'])
match = GIT_DESCRIBE_RE.match(rev)
if match:
__version__ = "{}+git-{}".format(match.group("version"), match.group("git"))
except:
pass
if not __version__:
__version__ = 'undefined'
# ########### endversion ##################
|
# -*- coding: utf-8 -*-
# ############# version ##################
from pkg_resources import get_distribution, DistributionNotFound
import os.path
import subprocess
import re
GIT_DESCRIBE_RE = re.compile('^(?P<version>v\d+\.\d+\.\d+)-(?P<git>\d+-g[a-fA-F0-9]+(?:-dirty)?)$')
__version__ = None
try:
_dist = get_distribution('hydrachain')
# Normalize case for Windows systems
dist_loc = os.path.normcase(_dist.location)
here = os.path.normcase(__file__)
if not here.startswith(os.path.join(dist_loc, 'hydrachain')):
# not installed, but there is another version that *is*
raise DistributionNotFound
__version__ = _dist.version
except DistributionNotFound:
pass
if not __version__:
try:
rev = subprocess.check_output(['git', 'describe', '--tags', '--dirty'])
match = GIT_DESCRIBE_RE.match(rev)
if match:
__version__ = "{}+git-{}".format(match.group("version"), match.group("git"))
except:
pass
if not __version__:
__version__ = 'undefined'
# ########### endversion ##################
|
Allow version parsing code to use non-annotated tags
|
Allow version parsing code to use non-annotated tags
|
Python
|
mit
|
HydraChain/hydrachain,wangkangda/hydrachain,HydraChain/hydrachain,wangkangda/hydrachain
|
---
+++
@@ -24,7 +24,7 @@
if not __version__:
try:
- rev = subprocess.check_output(['git', 'describe', '--dirty'])
+ rev = subprocess.check_output(['git', 'describe', '--tags', '--dirty'])
match = GIT_DESCRIBE_RE.match(rev)
if match:
__version__ = "{}+git-{}".format(match.group("version"), match.group("git"))
|
c27020fc832ce9cba8efee72034c32bb7a3e6b30
|
path_and_address/parsing.py
|
path_and_address/parsing.py
|
from .validation import valid_hostname, valid_port
def resolve(path_or_address=None, address=None, *ignored):
"""Returns (path, address) based on consecutive optional arguments, [path] [address]."""
if path_or_address is None or address is not None:
return path_or_address, address
path = None
if split_address(path_or_address)[1] is not None:
address = path_or_address
else:
path = path_or_address
return path, address
def split_address(address):
"""Returns (host, port) with an integer port from the specified address string. (None, None) is returned if the address is invalid."""
invalid = None, None
if not address:
return invalid
components = address.split(':')
if len(components) > 2 or not valid_hostname(components[0]):
return invalid
if len(components) == 2 and not valid_port(components[1]):
return invalid
if len(components) == 1:
components.insert(0 if valid_port(components[0]) else 1, None)
host, port = components
port = int(port) if port else None
return host, port
|
from .validation import valid_hostname, valid_port
def resolve(path_or_address=None, address=None, *ignored):
"""Returns (path, address) based on consecutive optional arguments, [path] [address]."""
if path_or_address is None or address is not None:
return path_or_address, address
path = None
if split_address(path_or_address)[1] is not None:
address = path_or_address
else:
path = path_or_address
return path, address
def split_address(address):
"""Returns (host, port) with an integer port from the specified address string. (None, None) is returned if the address is invalid."""
invalid = None, None
if not address:
return invalid
components = str(address).split(':')
if len(components) > 2 or not valid_hostname(components[0]):
return invalid
if len(components) == 2 and not valid_port(components[1]):
return invalid
if len(components) == 1:
components.insert(0 if valid_port(components[0]) else 1, None)
host, port = components
port = int(port) if port else None
return host, port
|
Allow integer ports in split_address.
|
Bugfix: Allow integer ports in split_address.
|
Python
|
mit
|
joeyespo/path-and-address
|
---
+++
@@ -21,7 +21,7 @@
if not address:
return invalid
- components = address.split(':')
+ components = str(address).split(':')
if len(components) > 2 or not valid_hostname(components[0]):
return invalid
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.