commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
9f80145574cfad56e91df9a598c311894d12a675
|
scratchpad/ncurses.py
|
scratchpad/ncurses.py
|
#!/usr/bin/env python3
from curses import wrapper
import platform
if platform.system() == "Darwin":
# create mock class for Pi Camera
class Camera:
def __init__(self):
self.brightness = 10
self.contrast = 24
else:
import picamera
properties = [
"brightness",
"contrast"
]
camera = Camera()
def main(stdscr):
# clear screen
stdscr.clear()
row = 0
for key in properties:
value = getattr(camera, key)
string = key + ": " + str(value)
stdscr.addstr(row, 0, string)
row = row + 1
stdscr.refresh()
stdscr.getkey()
wrapper(main)
|
#!/usr/bin/env python3
from curses import wrapper
import platform
if platform.system() == "Darwin":
# create mock class for Pi Camera
class PiCamera:
def __init__(self):
self.brightness = 10
self.contrast = 24
else:
from picamera import PiCamera
properties = [
"brightness",
"contrast"
]
camera = PiCamera()
def main(stdscr):
# clear screen
stdscr.clear()
row = 0
for key in properties:
value = getattr(camera, key)
string = key + ": " + str(value)
stdscr.addstr(row, 0, string)
row = row + 1
stdscr.refresh()
stdscr.getkey()
wrapper(main)
|
Fix name of Camera class
|
Fix name of Camera class
|
Python
|
mit
|
gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x
|
---
+++
@@ -5,19 +5,19 @@
if platform.system() == "Darwin":
# create mock class for Pi Camera
- class Camera:
+ class PiCamera:
def __init__(self):
self.brightness = 10
self.contrast = 24
else:
- import picamera
+ from picamera import PiCamera
properties = [
"brightness",
"contrast"
]
-camera = Camera()
+camera = PiCamera()
def main(stdscr):
# clear screen
|
75c1dedb6eddfcb540ee29de5ae31b99d9927d07
|
reddit/admin.py
|
reddit/admin.py
|
from django.contrib import admin
from reddit.models import RedditAccount
from reddit.forms import RedditAccountForm
from datetime import date
class RedditAccountAdmin(admin.ModelAdmin):
list_display = ('username', 'user', 'date_created', 'link_karma', 'comment_karma', 'last_update', 'is_valid')
search_fields = ['username', 'user']
fields = ('user', 'username')
form = RedditAccountForm
def is_valid(self, obj):
if not obj.date_created:
return False
# Account 3 months old?
if (date.today() - obj.date_created.date()).days >= 90:
return True
# Account created after 9/2/10 and before 13/2/10
if obj.date_created.date() >= date(2010, 2, 9) and obj.date_created.date() <= date(2010, 2, 13):
return True
return False
is_valid.short_description = 'Dreddit Eligible'
is_valid.boolean = True
def save_model(self, request, obj, form, change):
obj.api_update()
obj.save()
admin.site.register(RedditAccount, RedditAccountAdmin)
|
from django.contrib import admin
from reddit.models import RedditAccount
from reddit.forms import RedditAccountForm
from datetime import date
class RedditAccountAdmin(admin.ModelAdmin):
list_display = ('username', 'user', 'date_created', 'link_karma', 'comment_karma', 'last_update', 'validated', 'is_valid')
search_fields = ['username']
fields = ('user', 'username')
form = RedditAccountForm
def is_valid(self, obj):
if not obj.date_created:
return False
# Account 3 months old?
if (date.today() - obj.date_created.date()).days >= 90:
return True
# Account created after 9/2/10 and before 13/2/10
if obj.date_created.date() >= date(2010, 2, 9) and obj.date_created.date() <= date(2010, 2, 13):
return True
return False
is_valid.short_description = 'Dreddit Eligible'
is_valid.boolean = True
def save_model(self, request, obj, form, change):
obj.api_update()
obj.save()
admin.site.register(RedditAccount, RedditAccountAdmin)
|
Add validation details to the Admin interface
|
Add validation details to the Admin interface
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
---
+++
@@ -5,8 +5,8 @@
from datetime import date
class RedditAccountAdmin(admin.ModelAdmin):
- list_display = ('username', 'user', 'date_created', 'link_karma', 'comment_karma', 'last_update', 'is_valid')
- search_fields = ['username', 'user']
+ list_display = ('username', 'user', 'date_created', 'link_karma', 'comment_karma', 'last_update', 'validated', 'is_valid')
+ search_fields = ['username']
fields = ('user', 'username')
|
ca09f3e4286be605e179f0b6ac742305d165b431
|
monasca_setup/detection/plugins/http_check.py
|
monasca_setup/detection/plugins/http_check.py
|
import logging
import monasca_setup.agent_config
import monasca_setup.detection
log = logging.getLogger(__name__)
class HttpCheck(monasca_setup.detection.ArgsPlugin):
""" Setup an http_check according to the passed in args.
Despite being a detection plugin this plugin does no detection and will be a noop without arguments.
Expects space seperated arguments, the required argument is url. Optional parameters include:
disable_ssl_validation and match_pattern.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
self.available = self._check_required_args(['url'])
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monasca_setup.agent_config.Plugins()
log.info("\tEnabling the http_check plugin for {url}".format(**self.args))
# No support for setting headers at this time
instance = self._build_instance(['url', 'timeout', 'username', 'password', 'match_pattern',
'disable_ssl_validation'])
instance['name'] = self.args['url']
instance['collect_response_time'] = True
config['http_check'] = {'init_config': None, 'instances': [instance]}
return config
|
import ast
import logging
import monasca_setup.agent_config
import monasca_setup.detection
log = logging.getLogger(__name__)
class HttpCheck(monasca_setup.detection.ArgsPlugin):
""" Setup an http_check according to the passed in args.
Despite being a detection plugin this plugin does no detection and will be a noop without arguments.
Expects space seperated arguments, the required argument is url. Optional parameters include:
disable_ssl_validation and match_pattern.
"""
def _detect(self):
"""Run detection, set self.available True if the service is detected.
"""
self.available = self._check_required_args(['url'])
def build_config(self):
"""Build the config as a Plugins object and return.
"""
config = monasca_setup.agent_config.Plugins()
log.info("\tEnabling the http_check plugin for {url}".format(**self.args))
# No support for setting headers at this time
instance = self._build_instance(['url', 'timeout', 'username', 'password',
'match_pattern', 'disable_ssl_validation',
'name', 'use_keystone', 'collect_response_time'])
# Normalize any boolean parameters
for param in ['use_keystone', 'collect_response_time']:
if param in self.args:
instance[param] = ast.literal_eval(self.args[param].capitalize())
# Set some defaults
if 'collect_response_time' not in instance:
instance['collect_response_time'] = True
if 'name' not in instance:
instance['name'] = self.args['url']
config['http_check'] = {'init_config': None, 'instances': [instance]}
return config
|
Allow additional customization of HttpCheck
|
Allow additional customization of HttpCheck
Documentation on HttpCheck detection plugin refers to things that do
not currently work in the plugin, like activating use_keystone. This
change fixes that, and adds the ability to customize other http_check
parameters which were missing.
Change-Id: I2309b25f83f395dcd56914ba7cdfeac9b42c7481
|
Python
|
bsd-3-clause
|
sapcc/monasca-agent,sapcc/monasca-agent,sapcc/monasca-agent
|
---
+++
@@ -1,3 +1,4 @@
+import ast
import logging
import monasca_setup.agent_config
@@ -25,12 +26,20 @@
log.info("\tEnabling the http_check plugin for {url}".format(**self.args))
# No support for setting headers at this time
- instance = self._build_instance(['url', 'timeout', 'username', 'password', 'match_pattern',
- 'disable_ssl_validation'])
- instance['name'] = self.args['url']
- instance['collect_response_time'] = True
+ instance = self._build_instance(['url', 'timeout', 'username', 'password',
+ 'match_pattern', 'disable_ssl_validation',
+ 'name', 'use_keystone', 'collect_response_time'])
+
+ # Normalize any boolean parameters
+ for param in ['use_keystone', 'collect_response_time']:
+ if param in self.args:
+ instance[param] = ast.literal_eval(self.args[param].capitalize())
+ # Set some defaults
+ if 'collect_response_time' not in instance:
+ instance['collect_response_time'] = True
+ if 'name' not in instance:
+ instance['name'] = self.args['url']
config['http_check'] = {'init_config': None, 'instances': [instance]}
return config
-
|
a61d50ff6f564112c04d3a9a8ac6e57d5b99da9d
|
heufybot/output.py
|
heufybot/output.py
|
class OutputHandler(object):
def __init__(self, connection):
self.connection = connection
def cmdJOIN(self, channels, keys=None):
chans = channels.split(",")
for i in range(len(chans)):
if chans[i][0] not in self.connection.supportHelper.chanTypes:
chans[i] = "#{}".format(chans[i])
channels = ",".join(chans)
if keys:
self.connection.sendMessage("JOIN", channels, keys)
else:
self.connection.sendMessage("JOIN", channels)
def cmdNICK(self, nick):
self.connection.sendMessage("NICK", nick)
def cmdPING(self, message):
self.connection.sendMessage("PING", ":{}".format(message))
def cmdPONG(self, message):
self.connection.sendMessage("PONG", ":{}".format(message))
def cmdQUIT(self, reason):
self.connection.sendMessage("QUIT", ":{}".format(reason))
def cmdUSER(self, ident, gecos):
# RFC2812 allows usermodes to be set, but this isn't implemented much in IRCds at all.
# Pass 0 for usermodes instead.
self.connection.sendMessage("USER", ident, "0", "*", ":{}".format(gecos))
|
class OutputHandler(object):
def __init__(self, connection):
self.connection = connection
def cmdJOIN(self, channels, keys=[]):
for i in range(len(channels)):
if channels[i][0] not in self.connection.supportHelper.chanTypes:
channels[i] = "#{}".format(channels[i])
self.connection.sendMessage("JOIN", ",".join(channels), ",".join(keys))
def cmdNICK(self, nick):
self.connection.sendMessage("NICK", nick)
def cmdPING(self, message):
self.connection.sendMessage("PING", ":{}".format(message))
def cmdPONG(self, message):
self.connection.sendMessage("PONG", ":{}".format(message))
def cmdQUIT(self, reason):
self.connection.sendMessage("QUIT", ":{}".format(reason))
def cmdUSER(self, ident, gecos):
# RFC2812 allows usermodes to be set, but this isn't implemented much in IRCds at all.
# Pass 0 for usermodes instead.
self.connection.sendMessage("USER", ident, "0", "*", ":{}".format(gecos))
|
Use lists for the JOIN command and parse them before sending
|
Use lists for the JOIN command and parse them before sending
|
Python
|
mit
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
---
+++
@@ -2,16 +2,11 @@
def __init__(self, connection):
self.connection = connection
- def cmdJOIN(self, channels, keys=None):
- chans = channels.split(",")
- for i in range(len(chans)):
- if chans[i][0] not in self.connection.supportHelper.chanTypes:
- chans[i] = "#{}".format(chans[i])
- channels = ",".join(chans)
- if keys:
- self.connection.sendMessage("JOIN", channels, keys)
- else:
- self.connection.sendMessage("JOIN", channels)
+ def cmdJOIN(self, channels, keys=[]):
+ for i in range(len(channels)):
+ if channels[i][0] not in self.connection.supportHelper.chanTypes:
+ channels[i] = "#{}".format(channels[i])
+ self.connection.sendMessage("JOIN", ",".join(channels), ",".join(keys))
def cmdNICK(self, nick):
self.connection.sendMessage("NICK", nick)
|
980492cb76d0d72a005269a4fb9c1ec9767c10de
|
symfit/api.py
|
symfit/api.py
|
# Overwrite behavior of sympy objects to make more sense for this project.
import symfit.core.operators
# Expose useful objects.
from symfit.core.fit import (
Fit, Model, Constraint, ODEModel, ModelError, CallableModel,
CallableNumericalModel, GradientModel
)
from symfit.core.fit_results import FitResults
from symfit.core.argument import Variable, Parameter
from symfit.core.support import variables, parameters, D
# Expose the sympy API
from sympy import *
|
# Overwrite behavior of sympy objects to make more sense for this project.
import symfit.core.operators
# Expose useful objects.
from symfit.core.fit import (
Fit, Model, ODEModel, ModelError, CallableModel,
CallableNumericalModel, GradientModel
)
from symfit.core.fit_results import FitResults
from symfit.core.argument import Variable, Parameter
from symfit.core.support import variables, parameters, D
# Expose the sympy API
from sympy import *
|
Remove Constraint objects from the API
|
Remove Constraint objects from the API
|
Python
|
mit
|
tBuLi/symfit
|
---
+++
@@ -3,7 +3,7 @@
# Expose useful objects.
from symfit.core.fit import (
- Fit, Model, Constraint, ODEModel, ModelError, CallableModel,
+ Fit, Model, ODEModel, ModelError, CallableModel,
CallableNumericalModel, GradientModel
)
from symfit.core.fit_results import FitResults
|
d9334aee00ba0f7f7b6423d775a65ba6f40ac4d4
|
test_stack.py
|
test_stack.py
|
from stack import StackItem
from stack import StackFrame
import pytest
def test_item_data():
# Tests that "Bacon" is returned when calling .data on item
bacon = StackItem("Bacon")
assert bacon.data == "Bacon"
def test_stack_push():
# Tests that "Bacon" is first item when pushed to stack
bacon = StackItem("Bacon")
new_stack = StackFrame()
new_stack.push(bacon)
assert new_stack.first_item.data == "Bacon"
def test_stack_pop():
# Tests that "Bacon" is returned when it is popped from stack
bacon = StackItem("Bacon")
new_stack = StackFrame()
new_stack.push(bacon)
assert new_stack.pop() == "Bacon"
def test_empty_stack_pop():
# Tests that pop() on empty stack returns ValueError
new_stack = StackFrame()
with pytest.raises(ValueError):
new_stack.pop()
|
Add initial push and pop tests for Stack
|
Add initial push and pop tests for Stack
|
Python
|
mit
|
jwarren116/data-structures
|
---
+++
@@ -0,0 +1,32 @@
+from stack import StackItem
+from stack import StackFrame
+import pytest
+
+
+def test_item_data():
+ # Tests that "Bacon" is returned when calling .data on item
+ bacon = StackItem("Bacon")
+ assert bacon.data == "Bacon"
+
+
+def test_stack_push():
+ # Tests that "Bacon" is first item when pushed to stack
+ bacon = StackItem("Bacon")
+ new_stack = StackFrame()
+ new_stack.push(bacon)
+ assert new_stack.first_item.data == "Bacon"
+
+
+def test_stack_pop():
+ # Tests that "Bacon" is returned when it is popped from stack
+ bacon = StackItem("Bacon")
+ new_stack = StackFrame()
+ new_stack.push(bacon)
+ assert new_stack.pop() == "Bacon"
+
+
+def test_empty_stack_pop():
+ # Tests that pop() on empty stack returns ValueError
+ new_stack = StackFrame()
+ with pytest.raises(ValueError):
+ new_stack.pop()
|
|
81cd197e95e89dd37797c489774f34496ecea259
|
server/pushlanding.py
|
server/pushlanding.py
|
import logging
import os
from django.http import HttpResponse, Http404
from django.views.decorators.csrf import csrf_exempt
from twilio.rest import TwilioRestClient
logger = logging.getLogger('django')
@csrf_exempt
def handle(request):
if (request.method != 'POST'):
raise Http404
return HttpResponse("Hello, world. You're at the push page.")
def testSms(request):
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = os.environ['TWILIO_SID']
auth_token = os.environ['TWILIO_AUTH_TOKEN']
client = TwilioRestClient(account_sid, auth_token)
message = client.messages.create(
body="Jenny please?! I love you <3",
to="+19172679225", # Replace with your phone number
from_=os.environ['TWILIO_PHONE']
)
logger.info(message.sid)
return HttpResponse(message.sid)
|
import logging
import os
import json
from django.http import HttpResponse, Http404
from django.views.decorators.csrf import csrf_exempt
from twilio.rest import TwilioRestClient
logger = logging.getLogger('django')
@csrf_exempt
def handle(request):
if (request.method != 'POST'):
raise Http404
logger.info("Received a push notification")
rawCheckin = request.POST['checkin']
checkin = json.loads(rawCheckin)
logger.info(rawCheckin)
return HttpResponse("Hello, world. You're at the push page.")
def testSms(request):
# Your Account Sid and Auth Token from twilio.com/user/account
account_sid = os.environ['TWILIO_SID']
auth_token = os.environ['TWILIO_AUTH_TOKEN']
client = TwilioRestClient(account_sid, auth_token)
message = client.messages.create(
body="Jenny please?! I love you <3",
to="+19172679225", # Replace with your phone number
from_=os.environ['TWILIO_PHONE']
)
logger.info(message.sid)
return HttpResponse(message.sid)
|
Add debug logging for push landing
|
Add debug logging for push landing
|
Python
|
mit
|
zackzachariah/scavenger,zackzachariah/scavenger
|
---
+++
@@ -1,5 +1,6 @@
import logging
import os
+import json
from django.http import HttpResponse, Http404
from django.views.decorators.csrf import csrf_exempt
@@ -11,6 +12,10 @@
def handle(request):
if (request.method != 'POST'):
raise Http404
+ logger.info("Received a push notification")
+ rawCheckin = request.POST['checkin']
+ checkin = json.loads(rawCheckin)
+ logger.info(rawCheckin)
return HttpResponse("Hello, world. You're at the push page.")
def testSms(request):
|
9729a77b9b8cbfe8a6960ded4b5931e3ed64fe10
|
discover/__init__.py
|
discover/__init__.py
|
import logging
LOG_FORMAT = '%(asctime)s [%(name)s] %(levelname)s %(message)s'
LOG_DATE = '%Y-%m-%d %I:%M:%S %p'
logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_DATE, level=logging.WARN)
logger = logging.getLogger('yoda-discover')
logger.level = logging.INFO
|
import logging
LOG_FORMAT = '[%(name)s] %(levelname)s %(message)s'
logging.basicConfig(format=LOG_FORMAT, level=logging.WARN)
logger = logging.getLogger('yoda-discover')
logger.level = logging.INFO
|
Remove date from log formatting (handled by syslog)
|
Remove date from log formatting (handled by syslog)
|
Python
|
mit
|
totem/yoda-discover
|
---
+++
@@ -1,9 +1,7 @@
import logging
-LOG_FORMAT = '%(asctime)s [%(name)s] %(levelname)s %(message)s'
-LOG_DATE = '%Y-%m-%d %I:%M:%S %p'
+LOG_FORMAT = '[%(name)s] %(levelname)s %(message)s'
-
-logging.basicConfig(format=LOG_FORMAT, datefmt=LOG_DATE, level=logging.WARN)
+logging.basicConfig(format=LOG_FORMAT, level=logging.WARN)
logger = logging.getLogger('yoda-discover')
logger.level = logging.INFO
|
99b72ab4e40a4ffca901b36d870947ffb5103da8
|
HadithHouseWebsite/textprocessing/regex.py
|
HadithHouseWebsite/textprocessing/regex.py
|
import re
class DocScanner(object):
"""
A class used to find certain tokens in a given document. The tokens can be
specified by regular expressions.
"""
def __init__(self, tokens_dict, callback):
"""
Initialize a new document scanner.
:param tokens_dict: A dictionary whose keys are the types of tokens and
values are the regex for finding such types of tokens.
:param callback: A function to be called whenever a token is found.
"""
self.types = list(tokens_dict.keys())
self.scanning_regex = '|'.join(['(?P<%s>%s)' % (type, regex) for type, regex in tokens_dict.items()])
self.callback = callback
def scan(self, document):
prev_match = None
prev_type = None
for curr_match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
for type in self.types:
if curr_match.group(type) is not None:
self.callback(
type,
prev_type,
curr_match,
prev_match,
document
)
break
|
import re
class DocScanner(object):
"""
A class used to find certain tokens in a given document. The tokens can be
specified by regular expressions.
"""
def __init__(self, tokens_dict, callback):
"""
Initialize a new document scanner.
:param tokens_dict: A dictionary whose keys are the types of tokens and
values are the regex for finding such types of tokens.
:param callback: A function to be called whenever a token is found.
"""
self.types = list(tokens_dict.keys())
self.scanning_regex = '|'.join(['(?P<%s>%s)' % (type, regex) for type, regex in tokens_dict.items()])
self.callback = callback
def scan(self, document, context=None):
prev_match = None
prev_type = None
for match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
for type in self.types:
if match.group(type) is not None:
self.callback(
type,
prev_type,
match,
prev_match,
document,
context
)
prev_type = type
prev_match = match
break
|
Support passing context to callback
|
feat(docscanner): Support passing context to callback
It might be useful to send some additional parameters to the callback
function. For example, you might want to write to a file in the
callback. This commit allows the user to pass an optional context to
the callback everytime it finds a match.
|
Python
|
mit
|
hadithhouse/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,hadithhouse/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse
|
---
+++
@@ -18,17 +18,20 @@
self.scanning_regex = '|'.join(['(?P<%s>%s)' % (type, regex) for type, regex in tokens_dict.items()])
self.callback = callback
- def scan(self, document):
+ def scan(self, document, context=None):
prev_match = None
prev_type = None
- for curr_match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
+ for match in re.finditer(self.scanning_regex, document, flags=re.MULTILINE):
for type in self.types:
- if curr_match.group(type) is not None:
+ if match.group(type) is not None:
self.callback(
type,
prev_type,
- curr_match,
+ match,
prev_match,
- document
+ document,
+ context
)
+ prev_type = type
+ prev_match = match
break
|
114382ff9b6dad3c9ba621014dd7cd63ad49bef6
|
django/santropolFeast/meal/models.py
|
django/santropolFeast/meal/models.py
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Meal(models.Model):
class Meta:
verbose_name_plural = _('meals')
# Meal information
nom = models.CharField(max_length=50, verbose_name=_('name'))
description = models.TextField(verbose_name=_('description'))
ingredients = models.ManyToManyField(
'meal.Ingredient',
related_name='related_meals'
)
class Ingredient(models.Model):
class Meta:
verbose_name_plural = _('ingredients')
# Ingredient information
nom = models.CharField(max_length=50, verbose_name=_('name'))
class Allergy(models.Model):
class Meta:
verbose_name_plural = _('allergies')
# Allergy information
nom = models.CharField(max_length=50, verbose_name=_('name'))
description = models.TextField(verbose_name=_('description'))
ingredients = models.ManyToManyField(
'meal.Ingredient',
related_name='related_allergies'
)
|
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Meal(models.Model):
class Meta:
verbose_name_plural = _('meals')
# Meal information
nom = models.CharField(max_length=50, verbose_name=_('name'))
description = models.TextField(verbose_name=_('description'))
ingredients = models.ManyToManyField(
'meal.Ingredient',
related_name='related_meals'
)
def __str__( self ):
return self.nom
class Ingredient(models.Model):
class Meta:
verbose_name_plural = _('ingredients')
# Ingredient information
nom = models.CharField(max_length=50, verbose_name=_('name'))
def __str__( self ):
return self.nom
class Allergy(models.Model):
class Meta:
verbose_name_plural = _('allergies')
# Allergy information
nom = models.CharField(max_length=50, verbose_name=_('name'))
description = models.TextField(verbose_name=_('description'))
ingredients = models.ManyToManyField(
'meal.Ingredient',
related_name='related_allergies'
)
def __str__( self ):
return self.nom
|
Use string representation for objects
|
Use string representation for objects
|
Python
|
agpl-3.0
|
savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,savoirfairelinux/santropol-feast,savoirfairelinux/sous-chef,madmath/sous-chef,savoirfairelinux/sous-chef,madmath/sous-chef,savoirfairelinux/santropol-feast,madmath/sous-chef
|
---
+++
@@ -15,6 +15,9 @@
related_name='related_meals'
)
+ def __str__( self ):
+ return self.nom
+
class Ingredient(models.Model):
@@ -24,6 +27,8 @@
# Ingredient information
nom = models.CharField(max_length=50, verbose_name=_('name'))
+ def __str__( self ):
+ return self.nom
class Allergy(models.Model):
@@ -37,3 +42,6 @@
'meal.Ingredient',
related_name='related_allergies'
)
+
+ def __str__( self ):
+ return self.nom
|
31eadf6cdaf70621941a6c5d269ed33f46e27cd7
|
check.py
|
check.py
|
"""
This script can be used to check if TM1py can connect to your TM1 instance
"""
import getpass
from distutils.util import strtobool
from TM1py.Services import TM1Service
# Parameters for connection
user = input("TM1 User (leave empty if SSO): ")
password = getpass.getpass("Password (cmd doesn't show input, leave empty if SSO): ")
namespace = input("CAM Namespace (leave empty if no CAM Security): ")
address = input("Address (leave empty if localhost): ")
gateway = input("ClientCAMURI (leave empty if no SSO): ")
port = input("HTTP Port: ")
ssl = strtobool(input("SSL (True or False): "))
if len(namespace.strip()) == 0:
namespace = None
if len(gateway.strip()) == 0:
gateway = None
with TM1Service(
address=address,
port=port,
user=user,
password=password,
namespace=namespace,
gateway=gateway,
ssl=ssl) as tm1:
server_name = tm1.server.get_server_name()
print("Connection to TM1 established!! your Servername is: {}".format(server_name))
|
"""
This script can be used to check if TM1py can connect to your TM1 instance
"""
import getpass
from distutils.util import strtobool
from TM1py.Services import TM1Service
# Parameters for connection
user = input("TM1 User (leave empty if SSO): ")
password = getpass.getpass("Password (leave empty if SSO): ")
namespace = input("CAM Namespace (leave empty if no CAM Security): ")
address = input("Address (leave empty if localhost): ") or "localhost"
gateway = input("ClientCAMURI (leave empty if no SSO): ")
port = input("HTTP Port (Default 5000): ") or "5000"
ssl = strtobool(input("SSL (Default T or F): ") or "T")
if len(namespace.strip()) == 0:
namespace = None
if len(gateway.strip()) == 0:
gateway = None
try:
with TM1Service(
address=address,
port=port,
user=user,
password=password,
namespace=namespace,
gateway=gateway,
ssl=ssl) as tm1:
server_name = tm1.server.get_server_name()
print("Connection to TM1 established!! your Servername is: {}".format(server_name))
except Exception as e:
print("\nERROR:")
print("\t" + str(e))
|
Return error message instead of stack and add defaults to input
|
Return error message instead of stack and add defaults to input
|
Python
|
mit
|
cubewise-code/TM1py-samples
|
---
+++
@@ -9,12 +9,12 @@
# Parameters for connection
user = input("TM1 User (leave empty if SSO): ")
-password = getpass.getpass("Password (cmd doesn't show input, leave empty if SSO): ")
+password = getpass.getpass("Password (leave empty if SSO): ")
namespace = input("CAM Namespace (leave empty if no CAM Security): ")
-address = input("Address (leave empty if localhost): ")
+address = input("Address (leave empty if localhost): ") or "localhost"
gateway = input("ClientCAMURI (leave empty if no SSO): ")
-port = input("HTTP Port: ")
-ssl = strtobool(input("SSL (True or False): "))
+port = input("HTTP Port (Default 5000): ") or "5000"
+ssl = strtobool(input("SSL (Default T or F): ") or "T")
if len(namespace.strip()) == 0:
namespace = None
@@ -22,7 +22,8 @@
if len(gateway.strip()) == 0:
gateway = None
-with TM1Service(
+try:
+ with TM1Service(
address=address,
port=port,
user=user,
@@ -32,3 +33,7 @@
ssl=ssl) as tm1:
server_name = tm1.server.get_server_name()
print("Connection to TM1 established!! your Servername is: {}".format(server_name))
+except Exception as e:
+ print("\nERROR:")
+ print("\t" + str(e))
+
|
1addfaecb6210054480aa3c1c2a42878f526e1ed
|
axes/management/commands/axes_reset.py
|
axes/management/commands/axes_reset.py
|
from django.core.management.base import BaseCommand
from axes.utils import reset
class Command(BaseCommand):
help = ("resets any lockouts or failed login records. If called with an "
"IP, resets only for that IP")
def add_arguments(self, parser):
parser.add_argument('ip', nargs='+')
def handle(self, *args, **kwargs):
count = 0
if kwargs:
for ip in kwargs['ip']:
count += reset(ip=ip)
else:
count = reset()
if count:
print('{0} attempts removed.'.format(count))
else:
print('No attempts found.')
|
from django.core.management.base import BaseCommand
from axes.utils import reset
class Command(BaseCommand):
help = ("resets any lockouts or failed login records. If called with an "
"IP, resets only for that IP")
def add_arguments(self, parser):
parser.add_argument('ip', nargs='*')
def handle(self, *args, **kwargs):
count = 0
if kwargs:
for ip in kwargs['ip']:
count += reset(ip=ip)
else:
count = reset()
if count:
print('{0} attempts removed.'.format(count))
else:
print('No attempts found.')
|
Make ip positional argument optional
|
Make ip positional argument optional
|
Python
|
mit
|
jazzband/django-axes,svenhertle/django-axes,django-pci/django-axes
|
---
+++
@@ -8,7 +8,7 @@
"IP, resets only for that IP")
def add_arguments(self, parser):
- parser.add_argument('ip', nargs='+')
+ parser.add_argument('ip', nargs='*')
def handle(self, *args, **kwargs):
count = 0
|
759e22f8d629f76d7fca0d0567603c9ae6835fa6
|
api_v3/serializers/profile.py
|
api_v3/serializers/profile.py
|
from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers
from api_v3.models import Profile, Ticket
class ProfileSerializer(serializers.ModelSerializer):
tickets_count = fields.SerializerMethodField()
class Meta:
model = Profile
read_only_fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'locale'
)
fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'bio',
'locale',
'tickets_count'
)
def get_tickets_count(self, obj):
if obj.is_superuser:
return Ticket.objects.count()
else:
return Ticket.filter_by_user(obj).count()
def to_representation(self, obj):
request = self.context.get('request', None)
data = super(ProfileSerializer, self).to_representation(obj)
if request and request.user and request.user.is_superuser:
return data
# For regular users, make sure others email is not displayed
if request and request.user != obj:
data.pop('email')
return data
# Adds extra application related metas.
def get_root_meta(self, resource, many):
if not self.context.get('add_misc', None):
return {}
return {
'member_centers': settings.MEMBER_CENTERS,
'expense_scopes': settings.EXPENSE_SCOPES
}
|
from django.conf import settings
from rest_framework import fields
from rest_framework_json_api import serializers
from api_v3.models import Profile, Ticket
class ProfileSerializer(serializers.ModelSerializer):
tickets_count = fields.SerializerMethodField()
class Meta:
model = Profile
read_only_fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'locale'
)
fields = (
'id',
'email',
'first_name',
'last_name',
'is_staff',
'is_superuser',
'bio',
'locale',
'tickets_count'
)
def get_tickets_count(self, obj):
if obj.is_superuser:
return Ticket.objects.count()
else:
return Ticket.filter_by_user(obj).count()
def to_representation(self, obj):
request = self.context.get('request', None)
data = super(ProfileSerializer, self).to_representation(obj)
if request and request.user and request.user.is_superuser:
return data
# For regular users, make sure others email is not displayed
if request and request.user != obj:
data.pop('email')
return data
# Adds extra application related metas.
def get_root_meta(self, resource, many):
if not self.context.get('add_misc', None):
return {}
return {
'member_centers': sorted(settings.MEMBER_CENTERS),
'expense_scopes': sorted(settings.EXPENSE_SCOPES)
}
|
Return sorted member centers and expense scopes.
|
Return sorted member centers and expense scopes.
|
Python
|
mit
|
occrp/id-backend
|
---
+++
@@ -58,6 +58,6 @@
return {}
return {
- 'member_centers': settings.MEMBER_CENTERS,
- 'expense_scopes': settings.EXPENSE_SCOPES
+ 'member_centers': sorted(settings.MEMBER_CENTERS),
+ 'expense_scopes': sorted(settings.EXPENSE_SCOPES)
}
|
ecfa18b7f05a23bdc6beab705dc748559eef2873
|
lockdown/decorators.py
|
lockdown/decorators.py
|
from django.utils.decorators import decorator_from_middleware_with_args
from lockdown.middleware import LockdownMiddleware
def lockdown(*args, **kwargs):
    """Build a view decorator from ``LockdownMiddleware``.

    Accepts the same arguments as the middleware itself, which allows
    individual views to be locked down rather than the whole site.
    """
    decorator_factory = decorator_from_middleware_with_args(LockdownMiddleware)
    return decorator_factory(*args, **kwargs)
|
"""Provide a decorator based on the LockdownMiddleware.
This module provides a decorator that takes the same arguments as the
middleware, but allows more granular locking than the middleware.
"""
from django.utils.decorators import decorator_from_middleware_with_args
from lockdown.middleware import LockdownMiddleware
lockdown = decorator_from_middleware_with_args(LockdownMiddleware)
|
Remove wrapping of decorator in a func
|
Remove wrapping of decorator in a func
Growing older, growing wiser ...
This removes the unnecesary wrapping of the decorator in a function
introduced in e4a04c6, as it's not necessary and is less performant than
without.
|
Python
|
bsd-3-clause
|
Dunedan/django-lockdown,Dunedan/django-lockdown
|
---
+++
@@ -1,13 +1,10 @@
+"""Provide a decorator based on the LockdownMiddleware.
+
+This module provides a decorator that takes the same arguments as the
+middleware, but allows more granular locking than the middleware.
+"""
from django.utils.decorators import decorator_from_middleware_with_args
from lockdown.middleware import LockdownMiddleware
-
-def lockdown(*args, **kwargs):
- """Define a decorator based on the LockdownMiddleware.
-
- This decorator takes the same arguments as the middleware, but allows a
- more granular locking than the middleware.
- """
- return decorator_from_middleware_with_args(LockdownMiddleware)(*args,
- **kwargs)
+lockdown = decorator_from_middleware_with_args(LockdownMiddleware)
|
23604efc203f62f1059c4bd18f233cccdaf045e6
|
server/app_factory/create_app.py
|
server/app_factory/create_app.py
|
import wtforms_json
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
db = SQLAlchemy()
def create_app():
    """Build and return the configured Flask WSGI application."""
    # Enable JSON support for wtforms before the app is wired up.
    wtforms_json.init()

    # WSGI application object, pointing at the shared template/static dirs.
    flask_app = Flask(
        __name__,
        template_folder="../../",
        static_folder="../../static"
    )
    flask_app.config.from_object('config')

    # Bind the shared SQLAlchemy instance and ensure the tables exist.
    db.init_app(flask_app)
    with flask_app.test_request_context():
        db.create_all()

    return flask_app
|
import wtforms_json
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
# Create db object so it can be shared throughout the application
db = SQLAlchemy()
# Create the login manager to be shared throughout the application
login_manager = LoginManager()
def create_app():
    """Build the Flask WSGI application and wire up its helper components.

    Initializes wtforms JSON support, application configuration, the shared
    SQLAlchemy instance (creating tables), and the shared login manager.
    """
    # JSON support for wtforms must be enabled before forms are used.
    wtforms_json.init()

    # WSGI application object, pointing at the shared template/static dirs.
    flask_app = Flask(
        __name__,
        template_folder="../../",
        static_folder="../../static"
    )
    flask_app.config.from_object('config')

    # Bind the shared SQLAlchemy instance and ensure the tables exist.
    db.init_app(flask_app)
    with flask_app.test_request_context():
        db.create_all()

    # Bind the shared login manager to this application.
    login_manager.init_app(flask_app)

    return flask_app
|
Add login manager initialization to app creation method
|
Add login manager initialization to app creation method
|
Python
|
mit
|
ganemone/ontheside,ganemone/ontheside,ganemone/ontheside
|
---
+++
@@ -1,12 +1,19 @@
import wtforms_json
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
+from flask.ext.login import LoginManager
+# Create db object so it can be shared throughout the application
db = SQLAlchemy()
+
+# Create the login manager to be shared throughout the application
+login_manager = LoginManager()
def create_app():
-
+ """Creates and returns the Flask WSGI application
+ and initializes helping components"""
+ # Initialize json support for wtforms
wtforms_json.init()
# Define the WSGI Application object
@@ -19,8 +26,12 @@
# Configurations
app.config.from_object('config')
+ # Initialize database with application
db.init_app(app)
with app.test_request_context():
db.create_all()
+ # Initialize login manager with application
+ login_manager.init_app(app)
+
return app
|
2d35e48b68ff51fae09369b4a1a00d7599c454c1
|
common/djangoapps/util/json_request.py
|
common/djangoapps/util/json_request.py
|
from functools import wraps
import copy
import json
def expect_json(view_function):
    """Decorator that merges a JSON request body into ``request.POST``.

    If the request's Content-Type denotes JSON, the wrapped view receives a
    shallow clone of the request whose POST mapping also contains the parsed
    JSON body; otherwise the original request is passed through untouched.
    """
    @wraps(view_function)
    def expect_json_with_cloned_request(request, *args, **kwargs):
        # The Content-Type header may carry parameters (e.g. "; charset=utf-8"),
        # so an exact string comparison would wrongly skip such requests.
        # Compare only the start of the (lower-cased) header value.
        if request.META['CONTENT_TYPE'].lower().startswith("application/json"):
            cloned_request = copy.copy(request)
            cloned_request.POST = cloned_request.POST.copy()
            cloned_request.POST.update(json.loads(request.body))
            return view_function(cloned_request, *args, **kwargs)
        else:
            return view_function(request, *args, **kwargs)
    return expect_json_with_cloned_request
|
from functools import wraps
import copy
import json
def expect_json(view_function):
    """Decorator that folds a JSON request body into ``request.POST``."""
    @wraps(view_function)
    def expect_json_with_cloned_request(request, *args, **kwargs):
        # cdodge: the POST 'content-type' header can include extras such as
        # 'charset', so only the prefix of the header value is compared.
        content_type = request.META['CONTENT_TYPE'].lower()
        if not content_type.startswith("application/json"):
            # Non-JSON payloads pass straight through to the view.
            return view_function(request, *args, **kwargs)
        cloned_request = copy.copy(request)
        cloned_request.POST = cloned_request.POST.copy()
        cloned_request.POST.update(json.loads(request.body))
        return view_function(cloned_request, *args, **kwargs)
    return expect_json_with_cloned_request
|
Fix JSON postback error where the content-type header line can contain more info than just the application/json descriptor. Now we just do a compare on the start of the header value.
|
Fix JSON postback error where the content-type header line can contain more info than just the application/json descriptor. Now we just do a compare on the start of the header value.
|
Python
|
agpl-3.0
|
10clouds/edx-platform,chrisndodge/edx-platform,beacloudgenius/edx-platform,ahmadio/edx-platform,devs1991/test_edx_docmode,jzoldak/edx-platform,rismalrv/edx-platform,beacloudgenius/edx-platform,kmoocdev2/edx-platform,vismartltd/edx-platform,beni55/edx-platform,auferack08/edx-platform,adoosii/edx-platform,ferabra/edx-platform,vasyarv/edx-platform,cecep-edu/edx-platform,JCBarahona/edX,EDUlib/edx-platform,analyseuc3m/ANALYSE-v1,nanolearningllc/edx-platform-cypress,jswope00/GAI,zadgroup/edx-platform,BehavioralInsightsTeam/edx-platform,hastexo/edx-platform,OmarIthawi/edx-platform,ampax/edx-platform-backup,jbzdak/edx-platform,shubhdev/openedx,TeachAtTUM/edx-platform,bitifirefly/edx-platform,openfun/edx-platform,bitifirefly/edx-platform,pomegranited/edx-platform,ahmedaljazzar/edx-platform,IONISx/edx-platform,itsjeyd/edx-platform,arifsetiawan/edx-platform,mjirayu/sit_academy,dcosentino/edx-platform,beacloudgenius/edx-platform,mushtaqak/edx-platform,UOMx/edx-platform,10clouds/edx-platform,Livit/Livit.Learn.EdX,y12uc231/edx-platform,cpennington/edx-platform,pomegranited/edx-platform,valtech-mooc/edx-platform,appliedx/edx-platform,itsjeyd/edx-platform,kursitet/edx-platform,eestay/edx-platform,kursitet/edx-platform,apigee/edx-platform,marcore/edx-platform,Edraak/circleci-edx-platform,vasyarv/edx-platform,SivilTaram/edx-platform,pku9104038/edx-platform,zhenzhai/edx-platform,kxliugang/edx-platform,ZLLab-Mooc/edx-platform,a-parhom/edx-platform,jruiperezv/ANALYSE,procangroup/edx-platform,vismartltd/edx-platform,IONISx/edx-platform,edx/edx-platform,nanolearning/edx-platform,jelugbo/tundex,cyanna/edx-platform,vismartltd/edx-platform,beni55/edx-platform,chand3040/cloud_that,cpennington/edx-platform,kalebhartje/schoolboost,zubair-arbi/edx-platform,eestay/edx-platform,shashank971/edx-platform,zofuthan/edx-platform,proversity-org/edx-platform,abdoosh00/edraak,y12uc231/edx-platform,openfun/edx-platform,jswope00/GAI,mjg2203/edx-platform-seas,ZLLab-Mooc/edx-platform,Edraak/edx-platform,OmarI
thawi/edx-platform,unicri/edx-platform,sudheerchintala/LearnEraPlatForm,apigee/edx-platform,zadgroup/edx-platform,adoosii/edx-platform,eduNEXT/edx-platform,knehez/edx-platform,jbzdak/edx-platform,jswope00/GAI,nagyistoce/edx-platform,torchingloom/edx-platform,SravanthiSinha/edx-platform,tanmaykm/edx-platform,Shrhawk/edx-platform,teltek/edx-platform,doganov/edx-platform,EduPepperPDTesting/pepper2013-testing,andyzsf/edx,xuxiao19910803/edx,eduNEXT/edunext-platform,Edraak/circleci-edx-platform,MSOpenTech/edx-platform,unicri/edx-platform,martynovp/edx-platform,B-MOOC/edx-platform,MakeHer/edx-platform,MSOpenTech/edx-platform,nanolearningllc/edx-platform-cypress,jbassen/edx-platform,nikolas/edx-platform,fintech-circle/edx-platform,eestay/edx-platform,chand3040/cloud_that,hamzehd/edx-platform,louyihua/edx-platform,mbareta/edx-platform-ft,MSOpenTech/edx-platform,apigee/edx-platform,Softmotions/edx-platform,vasyarv/edx-platform,rismalrv/edx-platform,gsehub/edx-platform,mushtaqak/edx-platform,nikolas/edx-platform,synergeticsedx/deployment-wipro,alexthered/kienhoc-platform,gsehub/edx-platform,pabloborrego93/edx-platform,nanolearning/edx-platform,doganov/edx-platform,shubhdev/edx-platform,DNFcode/edx-platform,chauhanhardik/populo,jjmiranda/edx-platform,defance/edx-platform,WatanabeYasumasa/edx-platform,DNFcode/edx-platform,nanolearning/edx-platform,nttks/edx-platform,hastexo/edx-platform,jruiperezv/ANALYSE,ovnicraft/edx-platform,zerobatu/edx-platform,hkawasaki/kawasaki-aio8-0,JCBarahona/edX,knehez/edx-platform,benpatterson/edx-platform,MakeHer/edx-platform,motion2015/a3,caesar2164/edx-platform,chauhanhardik/populo,Endika/edx-platform,ahmadiga/min_edx,CredoReference/edx-platform,alu042/edx-platform,peterm-itr/edx-platform,pomegranited/edx-platform,andyzsf/edx,kalebhartje/schoolboost,valtech-mooc/edx-platform,motion2015/edx-platform,Semi-global/edx-platform,romain-li/edx-platform,dcosentino/edx-platform,zadgroup/edx-platform,zofuthan/edx-platform,waheedahmed/edx-platform,cselis86/e
dx-platform,cselis86/edx-platform,JioEducation/edx-platform,raccoongang/edx-platform,dsajkl/123,fintech-circle/edx-platform,benpatterson/edx-platform,UXE/local-edx,simbs/edx-platform,kamalx/edx-platform,edry/edx-platform,Softmotions/edx-platform,morenopc/edx-platform,mjg2203/edx-platform-seas,defance/edx-platform,torchingloom/edx-platform,marcore/edx-platform,xinjiguaike/edx-platform,ahmadio/edx-platform,msegado/edx-platform,nttks/edx-platform,PepperPD/edx-pepper-platform,PepperPD/edx-pepper-platform,proversity-org/edx-platform,chrisndodge/edx-platform,kmoocdev2/edx-platform,shashank971/edx-platform,tiagochiavericosta/edx-platform,zofuthan/edx-platform,halvertoluke/edx-platform,jazztpt/edx-platform,nanolearningllc/edx-platform-cypress,jbassen/edx-platform,jonathan-beard/edx-platform,motion2015/a3,pepeportela/edx-platform,4eek/edx-platform,TsinghuaX/edx-platform,syjeon/new_edx,kursitet/edx-platform,y12uc231/edx-platform,mcgachey/edx-platform,sameetb-cuelogic/edx-platform-test,auferack08/edx-platform,utecuy/edx-platform,SravanthiSinha/edx-platform,J861449197/edx-platform,olexiim/edx-platform,ESOedX/edx-platform,B-MOOC/edx-platform,dkarakats/edx-platform,ahmedaljazzar/edx-platform,zubair-arbi/edx-platform,DefyVentures/edx-platform,SravanthiSinha/edx-platform,angelapper/edx-platform,synergeticsedx/deployment-wipro,torchingloom/edx-platform,arbrandes/edx-platform,naresh21/synergetics-edx-platform,waheedahmed/edx-platform,EduPepperPD/pepper2013,MakeHer/edx-platform,jbassen/edx-platform,chudaol/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx,jjmiranda/edx-platform,Edraak/edraak-platform,IndonesiaX/edx-platform,wwj718/ANALYSE,jruiperezv/ANALYSE,longmen21/edx-platform,EduPepperPDTesting/pepper2013-testing,fly19890211/edx-platform,chauhanhardik/populo,Softmotions/edx-platform,SravanthiSinha/edx-platform,kmoocdev2/edx-platform,WatanabeYasumasa/edx-platform,valtech-mooc/edx-platform,chudaol/edx-platform,Endika/edx-platform,cyanna/edx-platform,mitocw/edx-platform,J861449
197/edx-platform,morpheby/levelup-by,ahmadio/edx-platform,mushtaqak/edx-platform,ubc/edx-platform,devs1991/test_edx_docmode,praveen-pal/edx-platform,inares/edx-platform,utecuy/edx-platform,y12uc231/edx-platform,JCBarahona/edX,procangroup/edx-platform,yokose-ks/edx-platform,knehez/edx-platform,xingyepei/edx-platform,jelugbo/tundex,EduPepperPDTesting/pepper2013-testing,dsajkl/123,cognitiveclass/edx-platform,waheedahmed/edx-platform,edx-solutions/edx-platform,tanmaykm/edx-platform,UOMx/edx-platform,carsongee/edx-platform,prarthitm/edxplatform,playm2mboy/edx-platform,abdoosh00/edx-rtl-final,torchingloom/edx-platform,nanolearningllc/edx-platform-cypress-2,chauhanhardik/populo,don-github/edx-platform,tanmaykm/edx-platform,mtlchun/edx,playm2mboy/edx-platform,inares/edx-platform,IITBinterns13/edx-platform-dev,mjg2203/edx-platform-seas,antonve/s4-project-mooc,atsolakid/edx-platform,hkawasaki/kawasaki-aio8-0,pku9104038/edx-platform,ESOedX/edx-platform,peterm-itr/edx-platform,Shrhawk/edx-platform,B-MOOC/edx-platform,nanolearningllc/edx-platform-cypress-2,iivic/BoiseStateX,bdero/edx-platform,eemirtekin/edx-platform,Stanford-Online/edx-platform,louyihua/edx-platform,halvertoluke/edx-platform,chudaol/edx-platform,prarthitm/edxplatform,Lektorium-LLC/edx-platform,shurihell/testasia,mjirayu/sit_academy,ampax/edx-platform,DNFcode/edx-platform,DNFcode/edx-platform,DefyVentures/edx-platform,RPI-OPENEDX/edx-platform,vikas1885/test1,cpennington/edx-platform,pelikanchik/edx-platform,dkarakats/edx-platform,etzhou/edx-platform,IITBinterns13/edx-platform-dev,teltek/edx-platform,cyanna/edx-platform,atsolakid/edx-platform,xingyepei/edx-platform,Livit/Livit.Learn.EdX,IONISx/edx-platform,edx/edx-platform,AkA84/edx-platform,shubhdev/edxOnBaadal,jruiperezv/ANALYSE,synergeticsedx/deployment-wipro,leansoft/edx-platform,Stanford-Online/edx-platform,carsongee/edx-platform,DefyVentures/edx-platform,gymnasium/edx-platform,nttks/jenkins-test,jswope00/griffinx,bitifirefly/edx-platform,gymnasium/edx-platfo
rm,mtlchun/edx,UOMx/edx-platform,RPI-OPENEDX/edx-platform,jonathan-beard/edx-platform,LICEF/edx-platform,LICEF/edx-platform,Lektorium-LLC/edx-platform,hkawasaki/kawasaki-aio8-1,zubair-arbi/edx-platform,louyihua/edx-platform,jolyonb/edx-platform,analyseuc3m/ANALYSE-v1,miptliot/edx-platform,iivic/BoiseStateX,Edraak/edraak-platform,mjirayu/sit_academy,lduarte1991/edx-platform,edx/edx-platform,mushtaqak/edx-platform,eestay/edx-platform,CredoReference/edx-platform,vikas1885/test1,pku9104038/edx-platform,mtlchun/edx,hkawasaki/kawasaki-aio8-2,devs1991/test_edx_docmode,antonve/s4-project-mooc,mitocw/edx-platform,ESOedX/edx-platform,rismalrv/edx-platform,shashank971/edx-platform,nikolas/edx-platform,jswope00/griffinx,rhndg/openedx,openfun/edx-platform,Unow/edx-platform,IndonesiaX/edx-platform,shubhdev/openedx,unicri/edx-platform,kamalx/edx-platform,dcosentino/edx-platform,dsajkl/123,shubhdev/edx-platform,jolyonb/edx-platform,antoviaque/edx-platform,shabab12/edx-platform,cselis86/edx-platform,morenopc/edx-platform,ampax/edx-platform-backup,fly19890211/edx-platform,eestay/edx-platform,fly19890211/edx-platform,hamzehd/edx-platform,unicri/edx-platform,ovnicraft/edx-platform,kxliugang/edx-platform,Unow/edx-platform,romain-li/edx-platform,jzoldak/edx-platform,abdoosh00/edraak,jamiefolsom/edx-platform,IndonesiaX/edx-platform,Ayub-Khan/edx-platform,eduNEXT/edx-platform,jazztpt/edx-platform,openfun/edx-platform,devs1991/test_edx_docmode,cecep-edu/edx-platform,msegado/edx-platform,CourseTalk/edx-platform,kmoocdev/edx-platform,shubhdev/openedx,jonathan-beard/edx-platform,sudheerchintala/LearnEraPlatForm,marcore/edx-platform,jamesblunt/edx-platform,zhenzhai/edx-platform,Edraak/edraak-platform,jazkarta/edx-platform,jswope00/griffinx,shabab12/edx-platform,gsehub/edx-platform,olexiim/edx-platform,hkawasaki/kawasaki-aio8-1,doismellburning/edx-platform,rationalAgent/edx-platform-custom,procangroup/edx-platform,JioEducation/edx-platform,simbs/edx-platform,solashirai/edx-platform,shubhdev/edx-
platform,pelikanchik/edx-platform,EduPepperPDTesting/pepper2013-testing,jelugbo/tundex,motion2015/edx-platform,martynovp/edx-platform,mitocw/edx-platform,caesar2164/edx-platform,sameetb-cuelogic/edx-platform-test,shurihell/testasia,Stanford-Online/edx-platform,morpheby/levelup-by,eduNEXT/edunext-platform,jjmiranda/edx-platform,bigdatauniversity/edx-platform,andyzsf/edx,teltek/edx-platform,nagyistoce/edx-platform,jolyonb/edx-platform,adoosii/edx-platform,hkawasaki/kawasaki-aio8-2,zubair-arbi/edx-platform,rationalAgent/edx-platform-custom,jamesblunt/edx-platform,eemirtekin/edx-platform,doismellburning/edx-platform,pdehaye/theming-edx-platform,carsongee/edx-platform,CredoReference/edx-platform,EduPepperPD/pepper2013,etzhou/edx-platform,msegado/edx-platform,EDUlib/edx-platform,motion2015/a3,shashank971/edx-platform,appliedx/edx-platform,doganov/edx-platform,motion2015/a3,edry/edx-platform,bitifirefly/edx-platform,kxliugang/edx-platform,pepeportela/edx-platform,louyihua/edx-platform,adoosii/edx-platform,dsajkl/reqiop,deepsrijit1105/edx-platform,rhndg/openedx,LICEF/edx-platform,rhndg/openedx,peterm-itr/edx-platform,rue89-tech/edx-platform,praveen-pal/edx-platform,angelapper/edx-platform,hmcmooc/muddx-platform,lduarte1991/edx-platform,stvstnfrd/edx-platform,jbzdak/edx-platform,mcgachey/edx-platform,cpennington/edx-platform,polimediaupv/edx-platform,ferabra/edx-platform,edx-solutions/edx-platform,atsolakid/edx-platform,yokose-ks/edx-platform,leansoft/edx-platform,analyseuc3m/ANALYSE-v1,a-parhom/edx-platform,jazztpt/edx-platform,TsinghuaX/edx-platform,appsembler/edx-platform,IITBinterns13/edx-platform-dev,solashirai/edx-platform,zerobatu/edx-platform,nikolas/edx-platform,chauhanhardik/populo_2,eemirtekin/edx-platform,alexthered/kienhoc-platform,jbzdak/edx-platform,WatanabeYasumasa/edx-platform,zubair-arbi/edx-platform,caesar2164/edx-platform,chand3040/cloud_that,bigdatauniversity/edx-platform,IndonesiaX/edx-platform,ak2703/edx-platform,Edraak/circleci-edx-platform,zofuthan/e
dx-platform,tiagochiavericosta/edx-platform,kalebhartje/schoolboost,beni55/edx-platform,pepeportela/edx-platform,nttks/jenkins-test,Lektorium-LLC/edx-platform,benpatterson/edx-platform,don-github/edx-platform,Endika/edx-platform,longmen21/edx-platform,martynovp/edx-platform,nttks/edx-platform,hastexo/edx-platform,kxliugang/edx-platform,eduNEXT/edunext-platform,dsajkl/reqiop,nttks/edx-platform,xingyepei/edx-platform,chudaol/edx-platform,waheedahmed/edx-platform,rationalAgent/edx-platform-custom,lduarte1991/edx-platform,doismellburning/edx-platform,jbassen/edx-platform,cognitiveclass/edx-platform,beacloudgenius/edx-platform,SivilTaram/edx-platform,bigdatauniversity/edx-platform,longmen21/edx-platform,pelikanchik/edx-platform,wwj718/edx-platform,MSOpenTech/edx-platform,peterm-itr/edx-platform,pabloborrego93/edx-platform,yokose-ks/edx-platform,motion2015/edx-platform,hmcmooc/muddx-platform,mjirayu/sit_academy,sudheerchintala/LearnEraPlatForm,eduNEXT/edx-platform,a-parhom/edx-platform,olexiim/edx-platform,rhndg/openedx,martynovp/edx-platform,cecep-edu/edx-platform,SravanthiSinha/edx-platform,nikolas/edx-platform,Ayub-Khan/edx-platform,ampax/edx-platform-backup,abdoosh00/edraak,shubhdev/openedx,dsajkl/123,hkawasaki/kawasaki-aio8-1,arbrandes/edx-platform,Edraak/edx-platform,benpatterson/edx-platform,adoosii/edx-platform,Kalyzee/edx-platform,hmcmooc/muddx-platform,shubhdev/edxOnBaadal,deepsrijit1105/edx-platform,pabloborrego93/edx-platform,defance/edx-platform,chrisndodge/edx-platform,Shrhawk/edx-platform,andyzsf/edx,openfun/edx-platform,kamalx/edx-platform,kursitet/edx-platform,LearnEra/LearnEraPlaftform,vismartltd/edx-platform,RPI-OPENEDX/edx-platform,mcgachey/edx-platform,xuxiao19910803/edx,yokose-ks/edx-platform,ampax/edx-platform,pdehaye/theming-edx-platform,ak2703/edx-platform,antoviaque/edx-platform,ahmedaljazzar/edx-platform,cselis86/edx-platform,atsolakid/edx-platform,cognitiveclass/edx-platform,don-github/edx-platform,Kalyzee/edx-platform,LearnEra/LearnEraPlaftfor
m,apigee/edx-platform,PepperPD/edx-pepper-platform,chand3040/cloud_that,CourseTalk/edx-platform,chauhanhardik/populo_2,LearnEra/LearnEraPlaftform,jamiefolsom/edx-platform,amir-qayyum-khan/edx-platform,don-github/edx-platform,fintech-circle/edx-platform,xuxiao19910803/edx-platform,carsongee/edx-platform,4eek/edx-platform,eduNEXT/edx-platform,vikas1885/test1,raccoongang/edx-platform,BehavioralInsightsTeam/edx-platform,kmoocdev/edx-platform,mcgachey/edx-platform,Semi-global/edx-platform,knehez/edx-platform,xinjiguaike/edx-platform,mbareta/edx-platform-ft,solashirai/edx-platform,UOMx/edx-platform,Kalyzee/edx-platform,deepsrijit1105/edx-platform,devs1991/test_edx_docmode,chrisndodge/edx-platform,TsinghuaX/edx-platform,Edraak/edx-platform,4eek/edx-platform,jruiperezv/ANALYSE,mjirayu/sit_academy,jazkarta/edx-platform-for-isc,ahmadio/edx-platform,leansoft/edx-platform,romain-li/edx-platform,chudaol/edx-platform,utecuy/edx-platform,playm2mboy/edx-platform,ahmadiga/min_edx,Kalyzee/edx-platform,y12uc231/edx-platform,TeachAtTUM/edx-platform,mahendra-r/edx-platform,Lektorium-LLC/edx-platform,appliedx/edx-platform,appliedx/edx-platform,beacloudgenius/edx-platform,ubc/edx-platform,xuxiao19910803/edx-platform,Ayub-Khan/edx-platform,longmen21/edx-platform,Shrhawk/edx-platform,jamesblunt/edx-platform,EduPepperPDTesting/pepper2013-testing,arifsetiawan/edx-platform,zerobatu/edx-platform,rue89-tech/edx-platform,ovnicraft/edx-platform,franosincic/edx-platform,abdoosh00/edraak,utecuy/edx-platform,wwj718/ANALYSE,zerobatu/edx-platform,dcosentino/edx-platform,ampax/edx-platform,ferabra/edx-platform,Stanford-Online/edx-platform,hastexo/edx-platform,ZLLab-Mooc/edx-platform,chauhanhardik/populo,rationalAgent/edx-platform-custom,kxliugang/edx-platform,franosincic/edx-platform,jamiefolsom/edx-platform,edx/edx-platform,dsajkl/reqiop,miptliot/edx-platform,ak2703/edx-platform,mushtaqak/edx-platform,cyanna/edx-platform,TsinghuaX/edx-platform,Kalyzee/edx-platform,syjeon/new_edx,hamzehd/edx-platform,Ak
A84/edx-platform,ahmadiga/min_edx,syjeon/new_edx,caesar2164/edx-platform,hkawasaki/kawasaki-aio8-2,inares/edx-platform,nttks/edx-platform,devs1991/test_edx_docmode,pdehaye/theming-edx-platform,JioEducation/edx-platform,Edraak/edx-platform,motion2015/edx-platform,kmoocdev/edx-platform,tiagochiavericosta/edx-platform,4eek/edx-platform,philanthropy-u/edx-platform,MakeHer/edx-platform,hkawasaki/kawasaki-aio8-0,solashirai/edx-platform,morenopc/edx-platform,edry/edx-platform,jzoldak/edx-platform,pabloborrego93/edx-platform,kmoocdev/edx-platform,dkarakats/edx-platform,vismartltd/edx-platform,morpheby/levelup-by,nanolearning/edx-platform,SivilTaram/edx-platform,shashank971/edx-platform,dkarakats/edx-platform,pku9104038/edx-platform,ZLLab-Mooc/edx-platform,morenopc/edx-platform,xinjiguaike/edx-platform,ubc/edx-platform,appsembler/edx-platform,alexthered/kienhoc-platform,xinjiguaike/edx-platform,prarthitm/edxplatform,nagyistoce/edx-platform,doismellburning/edx-platform,hamzehd/edx-platform,rationalAgent/edx-platform-custom,zhenzhai/edx-platform,synergeticsedx/deployment-wipro,xingyepei/edx-platform,iivic/BoiseStateX,JioEducation/edx-platform,simbs/edx-platform,kmoocdev/edx-platform,appsembler/edx-platform,ovnicraft/edx-platform,kamalx/edx-platform,Livit/Livit.Learn.EdX,fintech-circle/edx-platform,edry/edx-platform,UXE/local-edx,chauhanhardik/populo_2,nanolearningllc/edx-platform-cypress-2,iivic/BoiseStateX,zadgroup/edx-platform,vasyarv/edx-platform,CourseTalk/edx-platform,edx-solutions/edx-platform,sudheerchintala/LearnEraPlatForm,PepperPD/edx-pepper-platform,jazkarta/edx-platform,longmen21/edx-platform,Semi-global/edx-platform,romain-li/edx-platform,fly19890211/edx-platform,angelapper/edx-platform,edx-solutions/edx-platform,nanolearningllc/edx-platform-cypress,DefyVentures/edx-platform,nanolearningllc/edx-platform-cypress-2,amir-qayyum-khan/edx-platform,Livit/Livit.Learn.EdX,shubhdev/openedx,RPI-OPENEDX/edx-platform,cognitiveclass/edx-platform,hkawasaki/kawasaki-aio8-0,Ayub-
Khan/edx-platform,cecep-edu/edx-platform,kmoocdev2/edx-platform,rue89-tech/edx-platform,hkawasaki/kawasaki-aio8-1,eduNEXT/edunext-platform,romain-li/edx-platform,gymnasium/edx-platform,vasyarv/edx-platform,DNFcode/edx-platform,mahendra-r/edx-platform,SivilTaram/edx-platform,edry/edx-platform,dsajkl/123,cognitiveclass/edx-platform,jazkarta/edx-platform,zofuthan/edx-platform,dsajkl/reqiop,rhndg/openedx,analyseuc3m/ANALYSE-v1,lduarte1991/edx-platform,gymnasium/edx-platform,jazkarta/edx-platform-for-isc,wwj718/edx-platform,chauhanhardik/populo_2,valtech-mooc/edx-platform,itsjeyd/edx-platform,wwj718/ANALYSE,ahmadio/edx-platform,TeachAtTUM/edx-platform,amir-qayyum-khan/edx-platform,jbzdak/edx-platform,martynovp/edx-platform,jswope00/griffinx,nanolearningllc/edx-platform-cypress,praveen-pal/edx-platform,franosincic/edx-platform,marcore/edx-platform,polimediaupv/edx-platform,Softmotions/edx-platform,jazztpt/edx-platform,Softmotions/edx-platform,J861449197/edx-platform,don-github/edx-platform,playm2mboy/edx-platform,RPI-OPENEDX/edx-platform,beni55/edx-platform,10clouds/edx-platform,atsolakid/edx-platform,hkawasaki/kawasaki-aio8-2,shurihell/testasia,MakeHer/edx-platform,wwj718/edx-platform,xuxiao19910803/edx-platform,bdero/edx-platform,philanthropy-u/edx-platform,BehavioralInsightsTeam/edx-platform,wwj718/edx-platform,dkarakats/edx-platform,antonve/s4-project-mooc,xingyepei/edx-platform,mtlchun/edx,UXE/local-edx,LICEF/edx-platform,mahendra-r/edx-platform,mitocw/edx-platform,benpatterson/edx-platform,IndonesiaX/edx-platform,kmoocdev2/edx-platform,polimediaupv/edx-platform,naresh21/synergetics-edx-platform,arbrandes/edx-platform,fly19890211/edx-platform,cselis86/edx-platform,xuxiao19910803/edx-platform,nanolearningllc/edx-platform-cypress-2,J861449197/edx-platform,hmcmooc/muddx-platform,ferabra/edx-platform,franosincic/edx-platform,EduPepperPD/pepper2013,amir-qayyum-khan/edx-platform,antonve/s4-project-mooc,mahendra-r/edx-platform,WatanabeYasumasa/edx-platform,ahmedaljazzar/edx
-platform,jjmiranda/edx-platform,Unow/edx-platform,jamiefolsom/edx-platform,Edraak/circleci-edx-platform,etzhou/edx-platform,prarthitm/edxplatform,appliedx/edx-platform,kalebhartje/schoolboost,polimediaupv/edx-platform,wwj718/ANALYSE,stvstnfrd/edx-platform,jelugbo/tundex,leansoft/edx-platform,miptliot/edx-platform,ESOedX/edx-platform,eemirtekin/edx-platform,nagyistoce/edx-platform,MSOpenTech/edx-platform,OmarIthawi/edx-platform,4eek/edx-platform,tiagochiavericosta/edx-platform,nttks/jenkins-test,abdoosh00/edx-rtl-final,stvstnfrd/edx-platform,kamalx/edx-platform,ubc/edx-platform,mjg2203/edx-platform-seas,jazkarta/edx-platform-for-isc,jazkarta/edx-platform,morenopc/edx-platform,JCBarahona/edX,torchingloom/edx-platform,mtlchun/edx,ampax/edx-platform,IONISx/edx-platform,Endika/edx-platform,cecep-edu/edx-platform,raccoongang/edx-platform,IONISx/edx-platform,jelugbo/tundex,AkA84/edx-platform,jswope00/GAI,arifsetiawan/edx-platform,yokose-ks/edx-platform,shubhdev/edxOnBaadal,chand3040/cloud_that,nttks/jenkins-test,angelapper/edx-platform,mbareta/edx-platform-ft,zhenzhai/edx-platform,wwj718/edx-platform,chauhanhardik/populo_2,halvertoluke/edx-platform,bdero/edx-platform,knehez/edx-platform,sameetb-cuelogic/edx-platform-test,procangroup/edx-platform,jbassen/edx-platform,shubhdev/edx-platform,appsembler/edx-platform,shurihell/testasia,inares/edx-platform,halvertoluke/edx-platform,unicri/edx-platform,naresh21/synergetics-edx-platform,beni55/edx-platform,nttks/jenkins-test,syjeon/new_edx,polimediaupv/edx-platform,shurihell/testasia,mcgachey/edx-platform,rismalrv/edx-platform,tanmaykm/edx-platform,alu042/edx-platform,arifsetiawan/edx-platform,10clouds/edx-platform,sameetb-cuelogic/edx-platform-test,Edraak/edraak-platform,zerobatu/edx-platform,xuxiao19910803/edx-platform,wwj718/ANALYSE,abdoosh00/edx-rtl-final,pdehaye/theming-edx-platform,rue89-tech/edx-platform,BehavioralInsightsTeam/edx-platform,mahendra-r/edx-platform,OmarIthawi/edx-platform,B-MOOC/edx-platform,deepsrijit1105/ed
x-platform,alu042/edx-platform,ampax/edx-platform-backup,teltek/edx-platform,proversity-org/edx-platform,pepeportela/edx-platform,doismellburning/edx-platform,xinjiguaike/edx-platform,shubhdev/edxOnBaadal,raccoongang/edx-platform,bigdatauniversity/edx-platform,jazkarta/edx-platform,mbareta/edx-platform-ft,ahmadiga/min_edx,iivic/BoiseStateX,J861449197/edx-platform,utecuy/edx-platform,franosincic/edx-platform,arifsetiawan/edx-platform,Edraak/edx-platform,TeachAtTUM/edx-platform,etzhou/edx-platform,tiagochiavericosta/edx-platform,gsehub/edx-platform,jonathan-beard/edx-platform,leansoft/edx-platform,PepperPD/edx-pepper-platform,eemirtekin/edx-platform,ak2703/edx-platform,B-MOOC/edx-platform,nagyistoce/edx-platform,EDUlib/edx-platform,vikas1885/test1,LICEF/edx-platform,jswope00/griffinx,jamesblunt/edx-platform,antonve/s4-project-mooc,antoviaque/edx-platform,AkA84/edx-platform,hamzehd/edx-platform,olexiim/edx-platform,defance/edx-platform,EDUlib/edx-platform,dcosentino/edx-platform,valtech-mooc/edx-platform,IITBinterns13/edx-platform-dev,Edraak/circleci-edx-platform,philanthropy-u/edx-platform,jolyonb/edx-platform,ubc/edx-platform,proversity-org/edx-platform,xuxiao19910803/edx,bitifirefly/edx-platform,cyanna/edx-platform,zadgroup/edx-platform,motion2015/edx-platform,JCBarahona/edX,olexiim/edx-platform,kalebhartje/schoolboost,morpheby/levelup-by,CredoReference/edx-platform,msegado/edx-platform,doganov/edx-platform,Unow/edx-platform,auferack08/edx-platform,rue89-tech/edx-platform,miptliot/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,jazkarta/edx-platform-for-isc,ahmadiga/min_edx,vikas1885/test1,a-parhom/edx-platform,UXE/local-edx,shabab12/edx-platform,inares/edx-platform,ovnicraft/edx-platform,xuxiao19910803/edx,jonathan-beard/edx-platform,alexthered/kienhoc-platform,ak2703/edx-platform,jamesblunt/edx-platform,CourseTalk/edx-platform,waheedahmed/edx-platform,simbs/edx-platform,motion2015/a3,EduPepperPDTesting/pepper2013-testing,DefyVentures/edx-platform,arbrande
s/edx-platform,devs1991/test_edx_docmode,ZLLab-Mooc/edx-platform,Semi-global/edx-platform,shabab12/edx-platform,alu042/edx-platform,abdoosh00/edx-rtl-final,AkA84/edx-platform,devs1991/test_edx_docmode,etzhou/edx-platform,bdero/edx-platform,praveen-pal/edx-platform,rismalrv/edx-platform,nanolearning/edx-platform,jamiefolsom/edx-platform,auferack08/edx-platform,naresh21/synergetics-edx-platform,LearnEra/LearnEraPlaftform,bigdatauniversity/edx-platform,sameetb-cuelogic/edx-platform-test,Ayub-Khan/edx-platform,pelikanchik/edx-platform,itsjeyd/edx-platform,philanthropy-u/edx-platform,Shrhawk/edx-platform,ampax/edx-platform-backup,antoviaque/edx-platform,shubhdev/edx-platform,jzoldak/edx-platform,zhenzhai/edx-platform,pomegranited/edx-platform,EduPepperPD/pepper2013,simbs/edx-platform,solashirai/edx-platform,ferabra/edx-platform,Semi-global/edx-platform,EduPepperPD/pepper2013,alexthered/kienhoc-platform,doganov/edx-platform,kursitet/edx-platform,jazztpt/edx-platform,halvertoluke/edx-platform,jazkarta/edx-platform-for-isc,pomegranited/edx-platform,playm2mboy/edx-platform,shubhdev/edxOnBaadal
|
---
+++
@@ -6,7 +6,9 @@
def expect_json(view_function):
@wraps(view_function)
def expect_json_with_cloned_request(request, *args, **kwargs):
- if request.META['CONTENT_TYPE'] == "application/json":
+ # cdodge: fix postback errors in CMS. The POST 'content-type' header can include additional information
+ # e.g. 'charset', so we can't do a direct string compare
+ if request.META['CONTENT_TYPE'].lower().startswith("application/json"):
cloned_request = copy.copy(request)
cloned_request.POST = cloned_request.POST.copy()
cloned_request.POST.update(json.loads(request.body))
|
ed271823e5a5f957b17f00fd4823b6ae0b973e83
|
scripts/seam.py
|
scripts/seam.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Produces IPHAS Data Release 2 using an MPI computing cluster."""
from IPython import parallel
from astropy import log

__author__ = 'Geert Barentsen'

# Create the cluster view
# NOTE(review): the controller JSON path is hard-coded to one user's home
# directory -- this script only runs on that specific cluster setup.
client = parallel.Client('/home/gb/.config/ipython/profile_mpi/security/ipcontroller-seaming-client.json')
cluster = client[:]
log.info('Using {0} cores'.format(len(cluster)))

# Sync imports across all nodes
with client[:].sync_imports():
    # Make sure the IPHAS DR2 module is in the path
    import os
    import sys
    sys.path.append('/home/gb/dev/iphas-dr2')
    # Also extend sys.path on every remote engine; presumably sync_imports
    # mirrors only the import statements, not local sys.path changes.
    client[:].execute("sys.path.append('/home/gb/dev/iphas-dr2')", block=True)
    # Import DR2 generation modules
    from dr2 import constants
    from dr2 import seaming
    from dr2 import concatenating

# Reload the modules on the engines so code edits are picked up without
# restarting the cluster.
client[:].execute('reload(constants)', block=True)
client[:].execute('reload(seaming)', block=True)
client[:].execute('reload(concatenating)', block=True)

# Run the seaming step across the cluster; concatenation is a separate pass.
seaming.seam(cluster)
#concatenating.concatenate(cluster)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Produces IPHAS Data Release 2 using an MPI computing cluster."""
from IPython import parallel
from astropy import log
__author__ = 'Geert Barentsen'
# Create the cluster view
client = parallel.Client('/home/gb/.config/ipython/profile_mpi/security/ipcontroller-seaming-client.json')
cluster = client[:]
log.info('Using {0} cores'.format(len(cluster)))
# Sync imports across all nodes
with client[:].sync_imports():
# Make sure the IPHAS DR2 module is in the path
import os
import sys
sys.path.append('/home/gb/dev/iphas-dr2')
client[:].execute("sys.path.append('/home/gb/dev/iphas-dr2')", block=True)
# Import DR2 generation modules
from dr2 import constants
from dr2 import seaming
from dr2 import concatenating
client[:].execute('reload(constants)', block=True)
client[:].execute('reload(seaming)', block=True)
client[:].execute('reload(concatenating)', block=True)
#seaming.seam(cluster)
concatenating.concatenate(client[0,4,8,12])
|
Allow Glazebrook to be executed in a multi-pass fashion + parallelise
|
Allow Glazebrook to be executed in a multi-pass fashion + parallelise
|
Python
|
mit
|
barentsen/iphas-dr2,barentsen/iphas-dr2,barentsen/iphas-dr2
|
---
+++
@@ -27,5 +27,5 @@
client[:].execute('reload(seaming)', block=True)
client[:].execute('reload(concatenating)', block=True)
-seaming.seam(cluster)
-#concatenating.concatenate(cluster)
+#seaming.seam(cluster)
+concatenating.concatenate(client[0,4,8,12])
|
b51c8d107b6da5d6d6b0cc5a1db525bff856a1cf
|
AgileCLU/tests/__init__.py
|
AgileCLU/tests/__init__.py
|
#!/usr/bin/env python
import AgileCLU
import unittest
class AgileCLUTestCase(unittest.TestCase):
def setup(self):
self.agileclu = AgileCLU()
def test_epwbasekey(self):
return
def test_e_pw_hash(self):
return
def test_e_pw_dehash(self):
return
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
import unittest
import AgileCLU
class AgileCLUTestCase(unittest.TestCase):
def test_epwbasekey(self):
hash=AgileCLU.epwbasekey('test', 'test', 'test.example.com', '/')
self.assertEqual(hash, 'AbiDicIBaEuvafIuegJWVP8j')
def test_e_pw_hash(self):
hash=AgileCLU.e_pw_hash('teststr', 'test', 'test', 'test.example.com', '/')
self.assertEqual(hash, 'jyH0M5b9OyM=')
def test_e_pw_dehash(self):
hash=AgileCLU.e_pw_dehash('teststr', 'test', 'test', 'test.example.com', '/')
self.assertEqual(hash, '87654321')
if __name__ == "__main__":
unittest.main()
|
Add basic asserts for hashing helper functions.
|
Add basic asserts for hashing helper functions.
|
Python
|
bsd-2-clause
|
wylieswanson/AgileCLU
|
---
+++
@@ -1,17 +1,20 @@
#!/usr/bin/env python
+import unittest
import AgileCLU
-import unittest
class AgileCLUTestCase(unittest.TestCase):
- def setup(self):
- self.agileclu = AgileCLU()
def test_epwbasekey(self):
- return
+ hash=AgileCLU.epwbasekey('test', 'test', 'test.example.com', '/')
+ self.assertEqual(hash, 'AbiDicIBaEuvafIuegJWVP8j')
+
def test_e_pw_hash(self):
- return
+ hash=AgileCLU.e_pw_hash('teststr', 'test', 'test', 'test.example.com', '/')
+ self.assertEqual(hash, 'jyH0M5b9OyM=')
+
def test_e_pw_dehash(self):
- return
+ hash=AgileCLU.e_pw_dehash('teststr', 'test', 'test', 'test.example.com', '/')
+ self.assertEqual(hash, '87654321')
if __name__ == "__main__":
unittest.main()
|
52c5f4ddfde8db6179f11c3bec2bc8be69eed238
|
flake8_docstrings.py
|
flake8_docstrings.py
|
# -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
import pep257
__version__ = '0.2.1.post1'
class pep257Checker(object):
"""flake8 needs a class to check python file."""
name = 'pep257'
version = __version__
def __init__(self, tree, filename='(none)', builtins=None):
self.tree = tree
self.filename = filename
def run(self):
"""Use directly check() api from pep257."""
for error in pep257.check([self.filename]):
# Ignore AllError, Environment error.
if isinstance(error, pep257.Error):
yield (error.line, 0, error.message, type(self))
|
# -*- coding: utf-8 -*-
"""pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
import io
import pep8
import pep257
__version__ = '0.2.1.post1'
class pep257Checker(object):
"""flake8 needs a class to check python file."""
name = 'pep257'
version = __version__
STDIN_NAMES = set(['stdin', '-', '(none)', None])
def __init__(self, tree, filename='(none)', builtins=None):
self.tree = tree
self.filename = filename
self.source = self.load_source()
self.checker = pep257.PEP257Checker()
def run(self):
"""Use directly check() api from pep257."""
for error in self.checker.check_source(self.source, self.filename):
# Ignore AllError, Environment error.
if isinstance(error, pep257.Error):
yield (error.line, 0, error.message, type(self))
def load_source(self):
if self.filename in self.STDIN_NAMES:
self.filename = 'stdin'
self.source = pep8.stdin_get_value()
else:
with io.open(self.filename, encoding='utf-8') as fd:
self.source = fd.read()
|
Handle stdin in the plugin
|
Handle stdin in the plugin
Closes #2
|
Python
|
mit
|
PyCQA/flake8-docstrings
|
---
+++
@@ -2,6 +2,9 @@
"""pep257 docstrings convention needs error code and class parser for be
included as module into flake8
"""
+import io
+
+import pep8
import pep257
__version__ = '0.2.1.post1'
@@ -14,13 +17,25 @@
name = 'pep257'
version = __version__
+ STDIN_NAMES = set(['stdin', '-', '(none)', None])
+
def __init__(self, tree, filename='(none)', builtins=None):
self.tree = tree
self.filename = filename
+ self.source = self.load_source()
+ self.checker = pep257.PEP257Checker()
def run(self):
"""Use directly check() api from pep257."""
- for error in pep257.check([self.filename]):
+ for error in self.checker.check_source(self.source, self.filename):
# Ignore AllError, Environment error.
if isinstance(error, pep257.Error):
yield (error.line, 0, error.message, type(self))
+
+ def load_source(self):
+ if self.filename in self.STDIN_NAMES:
+ self.filename = 'stdin'
+ self.source = pep8.stdin_get_value()
+ else:
+ with io.open(self.filename, encoding='utf-8') as fd:
+ self.source = fd.read()
|
017ba0d18acb83a5135dd7a23c085b3c93d539b3
|
linkatos/message.py
|
linkatos/message.py
|
import re
link_re = re.compile("https?://\S+(\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(0).strip()
return answer
|
import re
link_re = re.compile("(\s|^)<(https?://[\w./?+]+)>(\s|$)")
def extract_url(message):
"""
Returns the first url in a message. If there aren't any returns None
"""
answer = link_re.search(message)
if answer is not None:
answer = answer.group(2).strip()
return answer
|
Change regex to adapt to the <url> format
|
fix: Change regex to adapt to the <url> format
|
Python
|
mit
|
iwi/linkatos,iwi/linkatos
|
---
+++
@@ -1,6 +1,6 @@
import re
-link_re = re.compile("https?://\S+(\s|$)")
+link_re = re.compile("(\s|^)<(https?://[\w./?+]+)>(\s|$)")
def extract_url(message):
@@ -10,6 +10,6 @@
answer = link_re.search(message)
if answer is not None:
- answer = answer.group(0).strip()
+ answer = answer.group(2).strip()
return answer
|
bfdd095d501c6760e6623aedf3525ba4e21d1637
|
jarviscli/tests/test_auto/test_lyrics.py
|
jarviscli/tests/test_auto/test_lyrics.py
|
import unittest
import os
from mock import call, patch
from packages.lyrics import lyrics
#TODO: add tests for PyLyricsClone
class Lyrics_Test(unittest.TestCase):
def setUp(self):
self.song_name = "everybody dies"
self.artist_name = "ayreon"
self.complete_info = "everybody dies-ayreon"
self.wrong_info = "everybody dies-arebon"
self.module = lyrics()
def test_lyrics_found_given_full_parameters(self):
self.assertIsNotNone(self.module.find(self.complete_info))
def test_lyrics_not_found_given_incomplete_parameter(self):
self.assertEqual(self.module.find(self.song_name), "you forgot to add either song name or artist name")
def test_lyrics_not_found_given_wrong_parameter(self):
self.assertEqual(self.module.find(self.wrong_info), "Song or Singer does not exist or the API does not have lyrics")
self.assertIsNone(self.module.find(self.wrong_info))
def test_split_works(self):
self.assertEqual(self.module.parse(self.complete_info), ["everybody dies", "ayreon"])
|
import unittest
import os
from mock import call, patch
from packages.lyrics import lyrics
#TODO: add tests for PyLyricsClone
class Lyrics_Test(unittest.TestCase):
def setUp(self):
self.song_name = "everybody dies"
self.artist_name = "ayreon"
self.complete_info = "everybody dies-ayreon"
self.wrong_info = "everybody dies-arebon"
self.module = lyrics()
def test_lyrics_found_given_full_parameters(self):
self.assertIsNotNone(self.module.find(self.complete_info))
def test_lyrics_not_found_given_incomplete_parameter(self):
self.assertEqual(self.module.find(self.song_name), "you forgot to add either song name or artist name")
def test_lyrics_not_found_given_wrong_parameter(self):
self.assertEqual(self.module.find(self.wrong_info), "Song or Singer does not exist or the API does not have lyrics")
def test_split_works(self):
self.assertEqual(self.module.parse(self.complete_info), ["everybody dies", "ayreon"])
|
Remove old assert that was wrong
|
Remove old assert that was wrong
|
Python
|
mit
|
sukeesh/Jarvis,appi147/Jarvis,sukeesh/Jarvis,appi147/Jarvis,sukeesh/Jarvis,sukeesh/Jarvis
|
---
+++
@@ -21,7 +21,6 @@
def test_lyrics_not_found_given_wrong_parameter(self):
self.assertEqual(self.module.find(self.wrong_info), "Song or Singer does not exist or the API does not have lyrics")
- self.assertIsNone(self.module.find(self.wrong_info))
def test_split_works(self):
self.assertEqual(self.module.parse(self.complete_info), ["everybody dies", "ayreon"])
|
4c4f4e3e5f1e92d0acdaf1598d4f9716bcd09727
|
app/users/models.py
|
app/users/models.py
|
from datetime import datetime
from app import db, bcrypt
from app.utils.misc import make_code
class User(db.Model):
id = db.Column(db.Integer(), primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
is_admin = db.Column(db.Boolean())
def __init__(self, email, password, is_admin=False):
self.email = email
self.active = True
self.is_admin = is_admin
self.set_password(password)
def set_password(self, password):
self.password = bcrypt.generate_password_hash(password)
def deactivate(self):
self.active = False
class PasswordReset(db.Model):
id = db.Column(db.Integer(), primary_key=True)
user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))
code = db.Column(db.String(255), unique=True, default=make_code)
date = db.Column(db.DateTime(), default=datetime.now)
user = db.relationship(User)
db.UniqueConstraint('user', 'code', name='uni_user_code')
def __init__(self, user):
self.user = user
|
from datetime import datetime, timedelta
from app import db, bcrypt
from app.utils.misc import make_code
def expiration_date():
return datetime.now() + timedelta(days=1)
class User(db.Model):
id = db.Column(db.Integer(), primary_key=True)
email = db.Column(db.String(255), unique=True)
password = db.Column(db.String(255))
active = db.Column(db.Boolean())
is_admin = db.Column(db.Boolean())
def __init__(self, email, password, is_admin=False):
self.email = email
self.active = True
self.is_admin = is_admin
self.set_password(password)
def set_password(self, password):
self.password = bcrypt.generate_password_hash(password)
def deactivate(self):
self.active = False
class PasswordReset(db.Model):
id = db.Column(db.Integer(), primary_key=True)
user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))
code = db.Column(db.String(255), unique=True, default=make_code)
date = db.Column(db.DateTime(), default=expiration_date)
user = db.relationship(User)
db.UniqueConstraint('user', 'code', name='uni_user_code')
def __init__(self, user):
self.user = user
|
Put pw reset expiration date in future
|
Put pw reset expiration date in future
|
Python
|
mit
|
projectweekend/Flask-PostgreSQL-API-Seed
|
---
+++
@@ -1,7 +1,11 @@
-from datetime import datetime
+from datetime import datetime, timedelta
from app import db, bcrypt
from app.utils.misc import make_code
+
+
+def expiration_date():
+ return datetime.now() + timedelta(days=1)
class User(db.Model):
@@ -30,7 +34,7 @@
id = db.Column(db.Integer(), primary_key=True)
user_id = db.Column(db.Integer(), db.ForeignKey('user.id'))
code = db.Column(db.String(255), unique=True, default=make_code)
- date = db.Column(db.DateTime(), default=datetime.now)
+ date = db.Column(db.DateTime(), default=expiration_date)
user = db.relationship(User)
|
5f88686bdd089d67192f75eac9d3f46effad2983
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Sergey Margaritov
# Copyright (c) 2013 Sergey Margaritov
#
# License: MIT
#
"""This module exports the scss-lint plugin linter class."""
import os
from SublimeLinter.lint import RubyLinter, util
class Scss(RubyLinter):
"""Provides an interface to the scss-lint executable."""
syntax = ('css', 'sass', 'scss')
cmd = 'ruby -S scss-lint'
regex = r'^.+?:(?P<line>\d+) (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)'
tempfile_suffix = 'scss'
defaults = {
'--include-linter:,': '',
'--exclude-linter:,': ''
}
inline_overrides = ('bundle-exec', 'include-linter', 'exclude-linter')
comment_re = r'^\s*/[/\*]'
config_file = ('--config', '.scss-lint.yml', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Sergey Margaritov
# Copyright (c) 2013 Sergey Margaritov
#
# License: MIT
#
"""This module exports the scss-lint plugin linter class."""
import os
from SublimeLinter.lint import RubyLinter, util
class Scss(RubyLinter):
"""Provides an interface to the scss-lint executable."""
syntax = ('css', 'sass', 'scss')
cmd = 'ruby -S scss-lint'
regex = r'^.+?:(?P<line>\d+)(?::(?P<column>\d+))? (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)'
tempfile_suffix = 'scss'
defaults = {
'--include-linter:,': '',
'--exclude-linter:,': ''
}
inline_overrides = ('bundle-exec', 'include-linter', 'exclude-linter')
comment_re = r'^\s*/[/\*]'
config_file = ('--config', '.scss-lint.yml', '~')
|
Fix regex for different output from scss-lint 0.49.0
|
Fix regex for different output from scss-lint 0.49.0
|
Python
|
mit
|
attenzione/SublimeLinter-scss-lint
|
---
+++
@@ -20,7 +20,7 @@
syntax = ('css', 'sass', 'scss')
cmd = 'ruby -S scss-lint'
- regex = r'^.+?:(?P<line>\d+) (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)'
+ regex = r'^.+?:(?P<line>\d+)(?::(?P<column>\d+))? (?:(?P<error>\[E\])|(?P<warning>\[W\])) (?P<message>[^`]*(?:`(?P<near>.+?)`)?.*)'
tempfile_suffix = 'scss'
defaults = {
'--include-linter:,': '',
|
cbbb59aa42676a9adfe25344437cc4284afcac73
|
main.py
|
main.py
|
from flask import Flask, render_template, redirect
import json
app = Flask(__name__)
with open("modules.json", 'r') as fp:
layout = json.load(fp)
@app.route('/')
def main():
return redirect("content/start/", code=302)
@app.route('/content/start/')
def start():
return render_template("start.html", start_link = layout["start"]["target"])
@app.route('/content/<module>/')
def get_content_module(module):
return render_template("content.html",
title = module,
data = layout[module]["data"],
target = layout[module]["target"])
@app.route('/content/end/')
def end():
return render_template("end.html")
if __name__ == '__main__':
app.run(debug=True)
|
from flask import Flask, render_template, redirect
import json
app = Flask(__name__)
with open("modules.json", 'r') as fp:
layout = json.load(fp)
@app.route('/')
def main():
return redirect("start/", code=302)
@app.route('/start/')
def start():
return render_template("start.html", start_link = layout["start"]["target"])
@app.route('/content/<module>/')
def get_content_module(module):
return render_template("content.html",
title = module,
data = layout[module]["data"],
target = layout[module]["target"])
@app.route('/end/')
def end():
return render_template("end.html")
if __name__ == '__main__':
app.run(debug=True)
|
Modify URLs for start and end
|
Modify URLs for start and end
|
Python
|
mit
|
grenmester/hunt-master,grenmester/hunt-master,grenmester/hunt-master,grenmester/hunt-master,grenmester/hunt-master
|
---
+++
@@ -7,9 +7,9 @@
@app.route('/')
def main():
- return redirect("content/start/", code=302)
+ return redirect("start/", code=302)
-@app.route('/content/start/')
+@app.route('/start/')
def start():
return render_template("start.html", start_link = layout["start"]["target"])
@@ -20,7 +20,7 @@
data = layout[module]["data"],
target = layout[module]["target"])
-@app.route('/content/end/')
+@app.route('/end/')
def end():
return render_template("end.html")
|
4f64f04a2fbbd2b25c38c9e0171be6eeaff070cf
|
main.py
|
main.py
|
#!/usr/bin/env python
from blinkenlights import setup, cleanup
from fourleds import light, clear
from time import sleep
pins = [32, 22, 18, 16]
# blu grn red yel
for p in pins:
setup(p)
for i in range(20):
for p in [32, 22, 18, 16, 18, 22, 32]:
clear(pins)
light(p)
sleep(0.07)
clear(pins)
sleep(0.5)
cleanup()
|
#!/usr/bin/env python
from blinkenlights import setup, cleanup
from fourleds import light, clear
from time import sleep
from random import randint
pins = [32, 22, 18, 16]
# blu grn red yel
for p in pins:
setup(p)
for i in range(20):
k1 = randint(5, 10) * 0.01
k2 = randint(5, 20) * 0.1
for p in [32, 22, 18, 16, 18, 22, 32]:
clear(pins)
light(p)
sleep(k1)
clear(pins)
sleep(k2)
cleanup()
|
Add pleasant surprises in timing
|
Add pleasant surprises in timing
|
Python
|
mit
|
zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie,zimolzak/Raspberry-Pi-newbie
|
---
+++
@@ -3,6 +3,7 @@
from blinkenlights import setup, cleanup
from fourleds import light, clear
from time import sleep
+from random import randint
pins = [32, 22, 18, 16]
# blu grn red yel
@@ -11,11 +12,13 @@
setup(p)
for i in range(20):
+ k1 = randint(5, 10) * 0.01
+ k2 = randint(5, 20) * 0.1
for p in [32, 22, 18, 16, 18, 22, 32]:
clear(pins)
light(p)
- sleep(0.07)
+ sleep(k1)
clear(pins)
- sleep(0.5)
+ sleep(k2)
cleanup()
|
1a150cb57171212358b84e351a0c073baa83d9fd
|
Home/xsOros.py
|
Home/xsOros.py
|
def checkio(array):
if array[0][0] == array[0][1] == array[0][2] or array[0][0] == array[1][0] == array[2][0] or array[0][0] == array[1][1] == array[2][2]:
return array[0][0]
if array[1][0] == array[1][1] == array[1][2] or array[0][1] == array[1][1] == array[2][1] or array[2][0] == array[1][1] == array[0][2]:
return array[1][1]
if array[2][0] == array[2][1] == array[2][2] or array[0][2] == array[1][2] == array[2][2]:
return array[2][2]
return "D"
if __name__ == '__main__':
assert checkio([
"X.O",
"XX.",
"XOO"]) == "X", "Xs wins"
assert checkio([
"OO.",
"XOX",
"XOX"]) == "O", "Os wins"
assert checkio([
"OOX",
"XXO",
"OXX"]) == "D", "Draw"
|
def checkio(array):
if (array[0][0] == array[0][1] == array[0][2] or array[0][0] == array[1][0] == array[2][0] or array[0][0] == array[1][1] == array[2][2]) and array[0][0] != '.':
return array[0][0]
if (array[1][0] == array[1][1] == array[1][2] or array[0][1] == array[1][1] == array[2][1] or array[2][0] == array[1][1] == array[0][2]) and array[1][1] != '.':
return array[1][1]
if (array[2][0] == array[2][1] == array[2][2] or array[0][2] == array[1][2] == array[2][2]) and array[2][2] != '.' :
return array[2][2]
return "D"
if __name__ == '__main__':
assert checkio([
"X.O",
"XX.",
"XOO"]) == "X", "Xs wins"
assert checkio([
"OO.",
"XOX",
"XOX"]) == "O", "Os wins"
assert checkio([
"OOX",
"XXO",
"OXX"]) == "D", "Draw"
assert checkio([
"...",
"XXO",
"XXO"
]) == "D", "Draw"
|
Fix the issue on Xs or Os problem.
|
Fix the issue on Xs or Os problem.
|
Python
|
mit
|
edwardzhu/checkio-solution
|
---
+++
@@ -1,9 +1,9 @@
def checkio(array):
- if array[0][0] == array[0][1] == array[0][2] or array[0][0] == array[1][0] == array[2][0] or array[0][0] == array[1][1] == array[2][2]:
+ if (array[0][0] == array[0][1] == array[0][2] or array[0][0] == array[1][0] == array[2][0] or array[0][0] == array[1][1] == array[2][2]) and array[0][0] != '.':
return array[0][0]
- if array[1][0] == array[1][1] == array[1][2] or array[0][1] == array[1][1] == array[2][1] or array[2][0] == array[1][1] == array[0][2]:
+ if (array[1][0] == array[1][1] == array[1][2] or array[0][1] == array[1][1] == array[2][1] or array[2][0] == array[1][1] == array[0][2]) and array[1][1] != '.':
return array[1][1]
- if array[2][0] == array[2][1] == array[2][2] or array[0][2] == array[1][2] == array[2][2]:
+ if (array[2][0] == array[2][1] == array[2][2] or array[0][2] == array[1][2] == array[2][2]) and array[2][2] != '.' :
return array[2][2]
return "D"
@@ -20,3 +20,8 @@
"OOX",
"XXO",
"OXX"]) == "D", "Draw"
+ assert checkio([
+ "...",
+ "XXO",
+ "XXO"
+ ]) == "D", "Draw"
|
71e36134d23ecca8eacd9ae9549b75c460227e53
|
manifest-parser.py
|
manifest-parser.py
|
#!/usr/bin/env python
import sys
import re
bugre = re.compile("bug\\s+(\\d+)", re.I);
def searchFile(f, path):
first = True
section = ''
for l in f.readlines():
# Skip trailing/leading whitespace
s = l.strip()
# We don't care about top-level comments
if len(s) < 2 or s[0] in ('#', ';'):
continue
if s[0] == '[' and s[-1] == ']':
section = s[1:-1]
continue
if not s.startswith("skip-if"):
continue
reasons = s.split('=', 1)[1].strip()
split = reasons.split('#', 1)
comment = ""
expr = split[0]
if len(split) > 1:
comment = split[1]
if expr.find("e10s") == -1:
continue
bugno = bugre.search(comment)
if section == "DEFAULT":
if not bugno:
print "=== %s - MISSING BUGNUM" % path
else:
print "=== %s - %s" % (path, bugno.group(1))
break
if first:
first = False
print "=== %s" % path
if not bugno:
print "%s - MISSING BUGNUM" % section
else:
print "%s - %s" % (section, bugno.group(1))
for path in sys.argv[1:]:
with open(path) as f:
searchFile(f, path)
|
#!/usr/bin/env python
import sys
import re
bugre = re.compile("bug\\s+(\\d+)", re.I);
def searchFile(f):
path = f.name
first = True
section = ''
for l in f.readlines():
# Skip trailing/leading whitespace
s = l.strip()
# We don't care about top-level comments
if len(s) < 2 or s[0] in ('#', ';'):
continue
if s[0] == '[' and s[-1] == ']':
section = s[1:-1]
continue
if not s.startswith("skip-if"):
continue
reasons = s.split('=', 1)[1].strip()
split = reasons.split('#', 1)
comment = ""
expr = split[0]
if len(split) > 1:
comment = split[1]
if expr.find("e10s") == -1:
continue
bugno = bugre.search(comment)
if section == "DEFAULT":
if not bugno:
print "=== %s - MISSING BUGNUM" % path
else:
print "=== %s - %s" % (path, bugno.group(1))
break
if first:
first = False
print "=== %s" % path
if not bugno:
print "%s - MISSING BUGNUM" % section
else:
print "%s - %s" % (section, bugno.group(1))
for path in sys.argv[1:]:
with open(path) as f:
searchFile(f)
|
Stop passing redundant data around
|
Stop passing redundant data around
|
Python
|
mit
|
mrbkap/disabled-e10s-finder,mrbkap/disabled-e10s-finder
|
---
+++
@@ -5,7 +5,8 @@
bugre = re.compile("bug\\s+(\\d+)", re.I);
-def searchFile(f, path):
+def searchFile(f):
+ path = f.name
first = True
section = ''
@@ -53,4 +54,4 @@
for path in sys.argv[1:]:
with open(path) as f:
- searchFile(f, path)
+ searchFile(f)
|
68c20c259834fa11b9e3e514c918c1776775ad12
|
src/geni/am/fakevm.py
|
src/geni/am/fakevm.py
|
#----------------------------------------------------------------------
# Copyright (c) 2011 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
import uuid
from resource import Resource
class FakeVM(Resource):
def __init__(self, agg):
super(FakeVM, self).__init__(str(uuid.uuid4()), "fakevm")
self._agg = agg
def deprovision(self):
"""Deprovision this resource at the resource provider."""
self._agg.deallocate(containers=None, resources=[self])
|
#----------------------------------------------------------------------
# Copyright (c) 2011 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
import uuid
from resource import Resource
class FakeVM(Resource):
def __init__(self, agg):
super(FakeVM, self).__init__(str(uuid.uuid4()), "fakevm")
self._agg = agg
def deprovision(self):
"""Deprovision this resource at the resource provider."""
self._agg.deallocate(container=None, resources=[self])
|
Fix keyword arg in deallocate cascade.
|
Fix keyword arg in deallocate cascade.
|
Python
|
mit
|
ahelsing/geni-tools,tcmitchell/geni-tools,GENI-NSF/gram,plantigrade/geni-tools,GENI-NSF/gram,tcmitchell/geni-tools,ahelsing/geni-tools,GENI-NSF/gram,plantigrade/geni-tools
|
---
+++
@@ -31,4 +31,4 @@
def deprovision(self):
"""Deprovision this resource at the resource provider."""
- self._agg.deallocate(containers=None, resources=[self])
+ self._agg.deallocate(container=None, resources=[self])
|
359cbd7b45289e364ad262f09dd3d3ef3932eb76
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import sys
from website.app import init_app
if __name__ == "__main__":
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.base.settings')
from django.core.management import execute_from_command_line
init_app(set_backends=True, routes=False, mfr=False, attach_request_handlers=False)
if 'livereload' in sys.argv:
from django.core.wsgi import get_wsgi_application
from livereload import Server
import django.conf as conf
conf.settings.STATIC_URL = '/static/'
application = get_wsgi_application()
server = Server(application)
server.watch('api/')
server.serve(port=8000)
else:
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
from website.app import init_app
if __name__ == "__main__":
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.base.settings')
from django.core.management import execute_from_command_line
init_app(set_backends=True, routes=False, attach_request_handlers=False)
if 'livereload' in sys.argv:
from django.core.wsgi import get_wsgi_application
from livereload import Server
import django.conf as conf
conf.settings.STATIC_URL = '/static/'
application = get_wsgi_application()
server = Server(application)
server.watch('api/')
server.serve(port=8000)
else:
execute_from_command_line(sys.argv)
|
Remove mfr kwarg from app init so the API will run
|
Remove mfr kwarg from app init so the API will run
|
Python
|
apache-2.0
|
alexschiller/osf.io,Ghalko/osf.io,dplorimer/osf,mluo613/osf.io,CenterForOpenScience/osf.io,ZobairAlijan/osf.io,cldershem/osf.io,ticklemepierce/osf.io,chennan47/osf.io,aaxelb/osf.io,rdhyee/osf.io,abought/osf.io,crcresearch/osf.io,KAsante95/osf.io,mattclark/osf.io,lyndsysimon/osf.io,brianjgeiger/osf.io,amyshi188/osf.io,jeffreyliu3230/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,adlius/osf.io,kwierman/osf.io,caneruguz/osf.io,samchrisinger/osf.io,crcresearch/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,felliott/osf.io,jeffreyliu3230/osf.io,acshi/osf.io,cosenal/osf.io,caseyrollins/osf.io,kch8qx/osf.io,cosenal/osf.io,kch8qx/osf.io,jnayak1/osf.io,zamattiac/osf.io,pattisdr/osf.io,zachjanicki/osf.io,mluke93/osf.io,barbour-em/osf.io,RomanZWang/osf.io,amyshi188/osf.io,abought/osf.io,KAsante95/osf.io,asanfilippo7/osf.io,mattclark/osf.io,ticklemepierce/osf.io,acshi/osf.io,adlius/osf.io,mluke93/osf.io,chrisseto/osf.io,sloria/osf.io,HalcyonChimera/osf.io,acshi/osf.io,chennan47/osf.io,brianjgeiger/osf.io,icereval/osf.io,KAsante95/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,pattisdr/osf.io,billyhunt/osf.io,doublebits/osf.io,TomHeatwole/osf.io,adlius/osf.io,hmoco/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,danielneis/osf.io,KAsante95/osf.io,zachjanicki/osf.io,caseyrygt/osf.io,caseyrygt/osf.io,asanfilippo7/osf.io,haoyuchen1992/osf.io,njantrania/osf.io,Johnetordoff/osf.io,reinaH/osf.io,leb2dg/osf.io,asanfilippo7/osf.io,petermalcolm/osf.io,danielneis/osf.io,petermalcolm/osf.io,reinaH/osf.io,baylee-d/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,RomanZWang/osf.io,ckc6cz/osf.io,jinluyuan/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,sbt9uc/osf.io,fabianvf/osf.io,doublebits/osf.io,wearpants/osf.io,laurenrevere/osf.io,jinluyuan/osf.io,zachjanicki/osf.io,leb2dg/osf.io,GageGaskins/osf.io,doublebits/osf.io,TomHeatwole/osf.io,danielneis/osf.io,hmoco/osf.io,MerlinZhang/osf.io,hmoco/osf.io,reinaH/osf.io,rdhyee/osf.io,mfraezz/osf.io,mluo613/osf.io,petermalcolm/osf.io,Ghalko/osf
.io,kwierman/osf.io,Nesiehr/osf.io,zamattiac/osf.io,fabianvf/osf.io,CenterForOpenScience/osf.io,billyhunt/osf.io,dplorimer/osf,alexschiller/osf.io,mattclark/osf.io,binoculars/osf.io,TomBaxter/osf.io,KAsante95/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,cosenal/osf.io,brandonPurvis/osf.io,njantrania/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,mfraezz/osf.io,cslzchen/osf.io,ckc6cz/osf.io,kch8qx/osf.io,icereval/osf.io,sloria/osf.io,mluke93/osf.io,SSJohns/osf.io,billyhunt/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,MerlinZhang/osf.io,cldershem/osf.io,abought/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,SSJohns/osf.io,saradbowman/osf.io,mluo613/osf.io,arpitar/osf.io,aaxelb/osf.io,lyndsysimon/osf.io,danielneis/osf.io,acshi/osf.io,emetsger/osf.io,Ghalko/osf.io,binoculars/osf.io,samanehsan/osf.io,chrisseto/osf.io,abought/osf.io,saradbowman/osf.io,kch8qx/osf.io,HarryRybacki/osf.io,samanehsan/osf.io,cosenal/osf.io,SSJohns/osf.io,jnayak1/osf.io,Johnetordoff/osf.io,bdyetton/prettychart,dplorimer/osf,jeffreyliu3230/osf.io,TomBaxter/osf.io,chrisseto/osf.io,HarryRybacki/osf.io,samchrisinger/osf.io,barbour-em/osf.io,caseyrygt/osf.io,alexschiller/osf.io,jolene-esposito/osf.io,mfraezz/osf.io,wearpants/osf.io,pattisdr/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,leb2dg/osf.io,haoyuchen1992/osf.io,haoyuchen1992/osf.io,sloria/osf.io,leb2dg/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,DanielSBrown/osf.io,binoculars/osf.io,felliott/osf.io,adlius/osf.io,bdyetton/prettychart,doublebits/osf.io,samanehsan/osf.io,HarryRybacki/osf.io,ZobairAlijan/osf.io,cslzchen/osf.io,ticklemepierce/osf.io,bdyetton/prettychart,cslzchen/osf.io,aaxelb/osf.io,barbour-em/osf.io,bdyetton/prettychart,emetsger/osf.io,njantrania/osf.io,cldershem/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,jnayak1/osf.io,caseyrygt/osf.io,kch8qx/osf.io,njantrania/osf.io,ZobairAlijan/osf.io,emetsger/osf.io,jm
carp/osf.io,MerlinZhang/osf.io,fabianvf/osf.io,baylee-d/osf.io,mluo613/osf.io,crcresearch/osf.io,TomBaxter/osf.io,jolene-esposito/osf.io,amyshi188/osf.io,billyhunt/osf.io,felliott/osf.io,felliott/osf.io,doublebits/osf.io,ckc6cz/osf.io,ckc6cz/osf.io,lyndsysimon/osf.io,DanielSBrown/osf.io,ZobairAlijan/osf.io,sbt9uc/osf.io,arpitar/osf.io,petermalcolm/osf.io,samanehsan/osf.io,arpitar/osf.io,jinluyuan/osf.io,jolene-esposito/osf.io,chennan47/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,SSJohns/osf.io,rdhyee/osf.io,wearpants/osf.io,mluke93/osf.io,CenterForOpenScience/osf.io,sbt9uc/osf.io,cwisecarver/osf.io,lyndsysimon/osf.io,RomanZWang/osf.io,barbour-em/osf.io,cldershem/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,mfraezz/osf.io,caseyrollins/osf.io,reinaH/osf.io,Nesiehr/osf.io,wearpants/osf.io,monikagrabowska/osf.io,hmoco/osf.io,jinluyuan/osf.io,sbt9uc/osf.io,caseyrollins/osf.io,erinspace/osf.io,kwierman/osf.io,alexschiller/osf.io,caneruguz/osf.io,alexschiller/osf.io,jeffreyliu3230/osf.io,emetsger/osf.io,jmcarp/osf.io,laurenrevere/osf.io,kwierman/osf.io,samchrisinger/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,dplorimer/osf,GageGaskins/osf.io,aaxelb/osf.io,mluo613/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,erinspace/osf.io,cwisecarver/osf.io,GageGaskins/osf.io,acshi/osf.io,monikagrabowska/osf.io,GageGaskins/osf.io,amyshi188/osf.io,erinspace/osf.io,jolene-esposito/osf.io,fabianvf/osf.io,brianjgeiger/osf.io,icereval/osf.io,cslzchen/osf.io,jmcarp/osf.io,arpitar/osf.io,HarryRybacki/osf.io
|
---
+++
@@ -9,7 +9,7 @@
from django.core.management import execute_from_command_line
- init_app(set_backends=True, routes=False, mfr=False, attach_request_handlers=False)
+ init_app(set_backends=True, routes=False, attach_request_handlers=False)
if 'livereload' in sys.argv:
from django.core.wsgi import get_wsgi_application
|
6a8753bcc8f1090e93b1f690bead4fda8e810d76
|
hackingweek/decorators.py
|
hackingweek/decorators.py
|
from django.contrib.auth.decorators import user_passes_test, login_required
from hackingweek.models import Team
has_no_team = user_passes_test(lambda u: u.team_set.all().count() == 0)
def has_no_team_required(view_func):
decorated_view_func = login_required(has_no_team(view_func))
return decorated_view_func
has_team = user_passes_test(lambda u: u.team_set.all().count() == 1 or u.is_staff)
def has_team_required(view_func):
decorated_view_func = login_required(has_team(view_func))
return decorated_view_func
|
from django.contrib.auth.decorators import user_passes_test, login_required
from hackingweek.models import Team
has_no_team = user_passes_test(lambda u: u.team_set.all().count() == 0)
def has_no_team_required(view_func):
decorated_view_func = login_required(has_no_team(view_func))
return decorated_view_func
has_team = user_passes_test(lambda u: u.team_set.all().count() >= 1 or u.is_staff)
def has_team_required(view_func):
decorated_view_func = login_required(has_team(view_func))
return decorated_view_func
|
Allow users with more than one team to quit a team
|
Allow users with more than one team to quit a team
|
Python
|
bsd-2-clause
|
perror/hackingweek,perror/hackingweek,perror/hackingweek
|
---
+++
@@ -8,7 +8,7 @@
decorated_view_func = login_required(has_no_team(view_func))
return decorated_view_func
-has_team = user_passes_test(lambda u: u.team_set.all().count() == 1 or u.is_staff)
+has_team = user_passes_test(lambda u: u.team_set.all().count() >= 1 or u.is_staff)
def has_team_required(view_func):
decorated_view_func = login_required(has_team(view_func))
|
f1957185f0d93861a8ed319223f574df8f4e838f
|
src/graphql_relay/node/plural.py
|
src/graphql_relay/node/plural.py
|
from typing import Any, Callable
from graphql.type import (
GraphQLArgument,
GraphQLField,
GraphQLInputType,
GraphQLOutputType,
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
)
def plural_identifying_root_field(
arg_name: str,
input_type: GraphQLInputType,
output_type: GraphQLOutputType,
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
if isinstance(input_type, GraphQLNonNull):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type)))
)
}
def resolve(_obj, info, **args):
inputs = args[arg_name]
return [resolve_single_input(info, input_) for input_ in inputs]
return GraphQLField(
GraphQLList(output_type),
description=description,
args=input_args,
resolve=resolve,
)
|
from typing import Any, Callable
from graphql.type import (
GraphQLArgument,
GraphQLField,
GraphQLInputType,
GraphQLOutputType,
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
is_non_null_type,
)
def plural_identifying_root_field(
arg_name: str,
input_type: GraphQLInputType,
output_type: GraphQLOutputType,
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
if is_non_null_type(input_type):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
GraphQLNonNull(GraphQLList(GraphQLNonNull(input_type)))
)
}
def resolve(_obj, info, **args):
inputs = args[arg_name]
return [resolve_single_input(info, input_) for input_ in inputs]
return GraphQLField(
GraphQLList(output_type),
description=description,
args=input_args,
resolve=resolve,
)
|
Use graphql's predicate function instead of 'isinstance'
|
Use graphql's predicate function instead of 'isinstance'
Replicates graphql/graphql-relay-js@5b428507ef246be7ca3afb3589c410874a57f9bc
|
Python
|
mit
|
graphql-python/graphql-relay-py
|
---
+++
@@ -8,6 +8,7 @@
GraphQLList,
GraphQLNonNull,
GraphQLResolveInfo,
+ is_non_null_type,
)
@@ -18,7 +19,7 @@
resolve_single_input: Callable[[GraphQLResolveInfo, str], Any],
description: str = None,
) -> GraphQLField:
- if isinstance(input_type, GraphQLNonNull):
+ if is_non_null_type(input_type):
input_type = input_type.of_type
input_args = {
arg_name: GraphQLArgument(
|
e6d0c5dab7c24b223815aee65d58b4b5191213a9
|
docs/extensions/jira.py
|
docs/extensions/jira.py
|
import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(HTCONDOR-" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 1000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 1000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
|
import os
import sys
from docutils import nodes
from docutils.parsers.rst import Directive
def make_link_node(rawtext, app, type, slug, options):
"""Create a link to a JIRA ticket.
:param rawtext: Text being replaced with link node.
:param app: Sphinx application context
:param type: Link type (issue, changeset, etc.)
:param slug: ID of the ticket to link to
:param options: Options dictionary passed to role func.
"""
base = "https://opensciencegrid.atlassian.net/browse/HTCONDOR-"
ref = base + slug
# set_classes(options)
node = nodes.reference(rawtext, "(HTCONDOR-" + slug + ")", refuri=ref, **options)
return node
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
if ticket_id > 10000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
'HTCondor ticket number must be a number less than or equal to 10000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
app = inliner.document.settings.env.app
node = make_link_node(rawtext, app, 'issue', str(ticket_id), options)
return [node], []
def setup(app):
app.add_role("jira", ticket_role)
|
Allow Jira tickets over 1000
|
HTCONDOR-1028: Allow Jira tickets over 1000
This used to double check between GitTrac and Jira ticket numbers.
I was tempted to remove the check altogether. However, it would
guard against and unfortunate key bounce. The change is going into
stable, so adding a digit to the number is a minimal change.
|
Python
|
apache-2.0
|
htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor,htcondor/htcondor
|
---
+++
@@ -22,11 +22,11 @@
def ticket_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
try:
ticket_id = int(text)
- if ticket_id > 1000:
+ if ticket_id > 10000:
raise ValueError
except ValueError:
msg = inliner.reporter.error(
- 'HTCondor ticket number must be a number less than or equal to 1000; '
+ 'HTCondor ticket number must be a number less than or equal to 10000; '
'"%s" is invalid.' % text, line=lineno)
prb = inliner.problematic(rawtext, rawtext, msg)
return [prb], [msg]
|
9d93c74dbabdf776eabe25c36352628e73da5d66
|
drivers/python/setup.py
|
drivers/python/setup.py
|
# Copyright 2010-2012 RethinkDB, all rights reserved.
from setuptools import setup
setup(name="rethinkdb"
,version="1.4.0-2"
,description="This package provides the Python driver library for the RethinkDB database server."
,url="http://rethinkdb.com"
,maintainer="RethinkDB Inc."
,maintainer_email="bugs@rethinkdb.com"
,packages=['rethinkdb']
,install_requires=['protobuf']
)
|
# Copyright 2010-2012 RethinkDB, all rights reserved.
from setuptools import setup
setup(name="rethinkdb"
,version="1.4.0-3"
,description="This package provides the Python driver library for the RethinkDB database server."
,url="http://rethinkdb.com"
,maintainer="RethinkDB Inc."
,maintainer_email="bugs@rethinkdb.com"
,packages=['rethinkdb']
,install_requires=['protobuf']
)
|
Update python driver version to 1.4.0-3
|
Update python driver version to 1.4.0-3
|
Python
|
agpl-3.0
|
matthaywardwebdesign/rethinkdb,victorbriz/rethinkdb,jesseditson/rethinkdb,gdi2290/rethinkdb,wkennington/rethinkdb,bchavez/rethinkdb,sbusso/rethinkdb,mbroadst/rethinkdb,RubenKelevra/rethinkdb,gavioto/rethinkdb,ajose01/rethinkdb,ajose01/rethinkdb,Wilbeibi/rethinkdb,captainpete/rethinkdb,RubenKelevra/rethinkdb,victorbriz/rethinkdb,captainpete/rethinkdb,mcanthony/rethinkdb,rrampage/rethinkdb,rrampage/rethinkdb,scripni/rethinkdb,elkingtonmcb/rethinkdb,wojons/rethinkdb,mbroadst/rethinkdb,ayumilong/rethinkdb,gavioto/rethinkdb,sontek/rethinkdb,wujf/rethinkdb,jmptrader/rethinkdb,urandu/rethinkdb,yaolinz/rethinkdb,AntouanK/rethinkdb,mbroadst/rethinkdb,marshall007/rethinkdb,sebadiaz/rethinkdb,sebadiaz/rethinkdb,wkennington/rethinkdb,marshall007/rethinkdb,lenstr/rethinkdb,JackieXie168/rethinkdb,captainpete/rethinkdb,wujf/rethinkdb,bpradipt/rethinkdb,jmptrader/rethinkdb,4talesa/rethinkdb,yakovenkodenis/rethinkdb,4talesa/rethinkdb,dparnell/rethinkdb,scripni/rethinkdb,niieani/rethinkdb,spblightadv/rethinkdb,wojons/rethinkdb,mquandalle/rethinkdb,sontek/rethinkdb,sbusso/rethinkdb,Wilbeibi/rethinkdb,niieani/rethinkdb,yaolinz/rethinkdb,RubenKelevra/rethinkdb,elkingtonmcb/rethinkdb,bchavez/rethinkdb,greyhwndz/rethinkdb,pap/rethinkdb,mquandalle/rethinkdb,Qinusty/rethinkdb,bchavez/rethinkdb,grandquista/rethinkdb,yakovenkodenis/rethinkdb,jmptrader/rethinkdb,alash3al/rethinkdb,lenstr/rethinkdb,spblightadv/rethinkdb,robertjpayne/rethinkdb,bchavez/rethinkdb,mquandalle/rethinkdb,bchavez/rethinkdb,tempbottle/rethinkdb,greyhwndz/rethinkdb,ajose01/rethinkdb,grandquista/rethinkdb,KSanthanam/rethinkdb,wkennington/rethinkdb,sbusso/rethinkdb,sontek/rethinkdb,ajose01/rethinkdb,rrampage/rethinkdb,alash3al/rethinkdb,elkingtonmcb/rethinkdb,mbroadst/rethinkdb,mbroadst/rethinkdb,bpradipt/rethinkdb,victorbriz/rethinkdb,KSanthanam/rethinkdb,RubenKelevra/rethinkdb,JackieXie168/rethinkdb,captainpete/rethinkdb,gdi2290/rethinkdb,wojons/rethinkdb,Qinusty/rethinkdb,urandu/rethinkdb,Wilbeibi/rethinkdb,alash3al/ret
hinkdb,elkingtonmcb/rethinkdb,bchavez/rethinkdb,niieani/rethinkdb,ayumilong/rethinkdb,mcanthony/rethinkdb,lenstr/rethinkdb,catroot/rethinkdb,gdi2290/rethinkdb,greyhwndz/rethinkdb,Wilbeibi/rethinkdb,gavioto/rethinkdb,pap/rethinkdb,gdi2290/rethinkdb,losywee/rethinkdb,KSanthanam/rethinkdb,marshall007/rethinkdb,elkingtonmcb/rethinkdb,JackieXie168/rethinkdb,Qinusty/rethinkdb,Wilbeibi/rethinkdb,mcanthony/rethinkdb,rrampage/rethinkdb,eliangidoni/rethinkdb,mquandalle/rethinkdb,AntouanK/rethinkdb,grandquista/rethinkdb,spblightadv/rethinkdb,4talesa/rethinkdb,yakovenkodenis/rethinkdb,ajose01/rethinkdb,AntouanK/rethinkdb,grandquista/rethinkdb,ajose01/rethinkdb,matthaywardwebdesign/rethinkdb,rrampage/rethinkdb,jesseditson/rethinkdb,wujf/rethinkdb,AntouanK/rethinkdb,losywee/rethinkdb,dparnell/rethinkdb,scripni/rethinkdb,mcanthony/rethinkdb,wkennington/rethinkdb,spblightadv/rethinkdb,matthaywardwebdesign/rethinkdb,marshall007/rethinkdb,wojons/rethinkdb,tempbottle/rethinkdb,sbusso/rethinkdb,niieani/rethinkdb,matthaywardwebdesign/rethinkdb,mquandalle/rethinkdb,urandu/rethinkdb,ajose01/rethinkdb,Wilbeibi/rethinkdb,mbroadst/rethinkdb,Qinusty/rethinkdb,wojons/rethinkdb,bchavez/rethinkdb,tempbottle/rethinkdb,yaolinz/rethinkdb,jesseditson/rethinkdb,eliangidoni/rethinkdb,wkennington/rethinkdb,robertjpayne/rethinkdb,wkennington/rethinkdb,matthaywardwebdesign/rethinkdb,scripni/rethinkdb,Qinusty/rethinkdb,alash3al/rethinkdb,mbroadst/rethinkdb,scripni/rethinkdb,lenstr/rethinkdb,RubenKelevra/rethinkdb,captainpete/rethinkdb,gavioto/rethinkdb,sontek/rethinkdb,robertjpayne/rethinkdb,bpradipt/rethinkdb,KSanthanam/rethinkdb,gdi2290/rethinkdb,wkennington/rethinkdb,4talesa/rethinkdb,alash3al/rethinkdb,mcanthony/rethinkdb,urandu/rethinkdb,elkingtonmcb/rethinkdb,jesseditson/rethinkdb,wkennington/rethinkdb,losywee/rethinkdb,dparnell/rethinkdb,tempbottle/rethinkdb,dparnell/rethinkdb,spblightadv/rethinkdb,jmptrader/rethinkdb,jesseditson/rethinkdb,losywee/rethinkdb,sebadiaz/rethinkdb,yaolinz/rethinkdb,ayum
ilong/rethinkdb,yaolinz/rethinkdb,RubenKelevra/rethinkdb,wojons/rethinkdb,4talesa/rethinkdb,matthaywardwebdesign/rethinkdb,mcanthony/rethinkdb,wojons/rethinkdb,greyhwndz/rethinkdb,bpradipt/rethinkdb,eliangidoni/rethinkdb,losywee/rethinkdb,sontek/rethinkdb,Qinusty/rethinkdb,mquandalle/rethinkdb,lenstr/rethinkdb,niieani/rethinkdb,Wilbeibi/rethinkdb,rrampage/rethinkdb,catroot/rethinkdb,bpradipt/rethinkdb,ajose01/rethinkdb,victorbriz/rethinkdb,tempbottle/rethinkdb,yakovenkodenis/rethinkdb,JackieXie168/rethinkdb,greyhwndz/rethinkdb,catroot/rethinkdb,gdi2290/rethinkdb,catroot/rethinkdb,victorbriz/rethinkdb,JackieXie168/rethinkdb,losywee/rethinkdb,greyhwndz/rethinkdb,dparnell/rethinkdb,ayumilong/rethinkdb,JackieXie168/rethinkdb,dparnell/rethinkdb,pap/rethinkdb,alash3al/rethinkdb,catroot/rethinkdb,grandquista/rethinkdb,niieani/rethinkdb,pap/rethinkdb,jesseditson/rethinkdb,sbusso/rethinkdb,bpradipt/rethinkdb,ayumilong/rethinkdb,yakovenkodenis/rethinkdb,yaolinz/rethinkdb,losywee/rethinkdb,robertjpayne/rethinkdb,4talesa/rethinkdb,KSanthanam/rethinkdb,dparnell/rethinkdb,scripni/rethinkdb,Qinusty/rethinkdb,sontek/rethinkdb,AntouanK/rethinkdb,captainpete/rethinkdb,victorbriz/rethinkdb,marshall007/rethinkdb,lenstr/rethinkdb,Qinusty/rethinkdb,wujf/rethinkdb,losywee/rethinkdb,alash3al/rethinkdb,Qinusty/rethinkdb,KSanthanam/rethinkdb,elkingtonmcb/rethinkdb,sbusso/rethinkdb,mcanthony/rethinkdb,robertjpayne/rethinkdb,bchavez/rethinkdb,tempbottle/rethinkdb,marshall007/rethinkdb,elkingtonmcb/rethinkdb,bchavez/rethinkdb,scripni/rethinkdb,greyhwndz/rethinkdb,spblightadv/rethinkdb,mquandalle/rethinkdb,mquandalle/rethinkdb,gavioto/rethinkdb,robertjpayne/rethinkdb,mbroadst/rethinkdb,gdi2290/rethinkdb,pap/rethinkdb,sebadiaz/rethinkdb,ayumilong/rethinkdb,grandquista/rethinkdb,pap/rethinkdb,AntouanK/rethinkdb,grandquista/rethinkdb,catroot/rethinkdb,mcanthony/rethinkdb,yakovenkodenis/rethinkdb,captainpete/rethinkdb,JackieXie168/rethinkdb,matthaywardwebdesign/rethinkdb,bpradipt/rethinkdb,dparnell/
rethinkdb,victorbriz/rethinkdb,jesseditson/rethinkdb,robertjpayne/rethinkdb,eliangidoni/rethinkdb,yakovenkodenis/rethinkdb,RubenKelevra/rethinkdb,bpradipt/rethinkdb,scripni/rethinkdb,jmptrader/rethinkdb,eliangidoni/rethinkdb,dparnell/rethinkdb,sbusso/rethinkdb,RubenKelevra/rethinkdb,JackieXie168/rethinkdb,spblightadv/rethinkdb,pap/rethinkdb,spblightadv/rethinkdb,pap/rethinkdb,jmptrader/rethinkdb,urandu/rethinkdb,gavioto/rethinkdb,robertjpayne/rethinkdb,captainpete/rethinkdb,4talesa/rethinkdb,tempbottle/rethinkdb,marshall007/rethinkdb,greyhwndz/rethinkdb,sbusso/rethinkdb,rrampage/rethinkdb,wujf/rethinkdb,rrampage/rethinkdb,wojons/rethinkdb,mbroadst/rethinkdb,tempbottle/rethinkdb,niieani/rethinkdb,niieani/rethinkdb,eliangidoni/rethinkdb,lenstr/rethinkdb,sontek/rethinkdb,jmptrader/rethinkdb,yaolinz/rethinkdb,gavioto/rethinkdb,robertjpayne/rethinkdb,grandquista/rethinkdb,KSanthanam/rethinkdb,gavioto/rethinkdb,jesseditson/rethinkdb,wujf/rethinkdb,bpradipt/rethinkdb,catroot/rethinkdb,sebadiaz/rethinkdb,yaolinz/rethinkdb,catroot/rethinkdb,KSanthanam/rethinkdb,jmptrader/rethinkdb,Wilbeibi/rethinkdb,sebadiaz/rethinkdb,matthaywardwebdesign/rethinkdb,urandu/rethinkdb,ayumilong/rethinkdb,JackieXie168/rethinkdb,sontek/rethinkdb,4talesa/rethinkdb,alash3al/rethinkdb,yakovenkodenis/rethinkdb,lenstr/rethinkdb,AntouanK/rethinkdb,victorbriz/rethinkdb,eliangidoni/rethinkdb,eliangidoni/rethinkdb,wujf/rethinkdb,eliangidoni/rethinkdb,sebadiaz/rethinkdb,grandquista/rethinkdb,marshall007/rethinkdb,ayumilong/rethinkdb,urandu/rethinkdb,sebadiaz/rethinkdb,AntouanK/rethinkdb,urandu/rethinkdb
|
---
+++
@@ -2,7 +2,7 @@
from setuptools import setup
setup(name="rethinkdb"
- ,version="1.4.0-2"
+ ,version="1.4.0-3"
,description="This package provides the Python driver library for the RethinkDB database server."
,url="http://rethinkdb.com"
,maintainer="RethinkDB Inc."
|
6e2a484ac46279c6a077fb135d7e5f66605e9b88
|
mox/app.py
|
mox/app.py
|
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from views import mocks
import os
app = Flask(__name__)
app.config["MONGODB_SETTINGS"] = {
"db": "mox"
}
app.config["SECRET_KEY"] = "KeepThisS3cr3t"
if os.environ.get('PRODUCTION'):
app.config["MONGODB_SETTINGS"]["host"] = os.environ.get("PROD_MONGODB")
db = MongoEngine(app)
app.register_blueprint(mocks)
if __name__ == '__main__':
app.run()
|
from flask import Flask
from flask.ext.mongoengine import MongoEngine
from views import mocks
import os
app = Flask(__name__)
app.config["MONGODB_SETTINGS"] = {
"db": "mox"
}
app.config["SECRET_KEY"] = "KeepThisS3cr3t"
if os.environ.get('HEROKU') == 1:
app.config["MONGODB_SETTINGS"]["host"] = os.environ.get("MONGODB_URI")
db = MongoEngine(app)
app.register_blueprint(mocks)
if __name__ == '__main__':
app.run()
|
Fix up settings for Heroku
|
Fix up settings for Heroku
|
Python
|
mit
|
abouzek/mox,abouzek/mox
|
---
+++
@@ -9,8 +9,8 @@
}
app.config["SECRET_KEY"] = "KeepThisS3cr3t"
-if os.environ.get('PRODUCTION'):
- app.config["MONGODB_SETTINGS"]["host"] = os.environ.get("PROD_MONGODB")
+if os.environ.get('HEROKU') == 1:
+ app.config["MONGODB_SETTINGS"]["host"] = os.environ.get("MONGODB_URI")
db = MongoEngine(app)
app.register_blueprint(mocks)
|
2198e43a3701351085ac186a9a8574b788148fcf
|
mysite/mysite/tests/test_middleware.py
|
mysite/mysite/tests/test_middleware.py
|
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.cm = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
setattr(self.request, 'FACTORY_BOY_MODEL_PATH', 'User')
self.assertEqual(self.cm.process_request(self.request), None)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
from django.contrib.auth.models import User
from django.test import TestCase
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
import json
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
self.request.configure_mock(
**{
'GET': {
'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
'FACTORY_BOY_ARGS': ''
}
}
)
response = self.middleware.process_request(self.request)
self.assertEqual(201, response.status_code)
self.assertEqual(
'johndoe',
json.loads(response.content).get('username')
)
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
Add unit test for factory boy middleware.
|
Add unit test for factory boy middleware.
|
Python
|
apache-2.0
|
kitconcept/robotframework-djangolibrary
|
---
+++
@@ -3,16 +3,32 @@
from DjangoLibrary.middleware import FactoryBoyMiddleware
from mock import Mock
+import json
+
class TestFactoryBoyMiddleware(TestCase):
def setUp(self):
- self.cm = FactoryBoyMiddleware()
+ self.middleware = FactoryBoyMiddleware()
self.request = Mock()
self.request.session = {}
def test_process_request_creates_object(self):
- setattr(self.request, 'FACTORY_BOY_MODEL_PATH', 'User')
- self.assertEqual(self.cm.process_request(self.request), None)
+ self.request.configure_mock(
+ **{
+ 'GET': {
+ 'FACTORY_BOY_MODEL_PATH': 'mysite.tests.factories.UserFactory', # noqa
+ 'FACTORY_BOY_ARGS': ''
+ }
+ }
+ )
+
+ response = self.middleware.process_request(self.request)
+
+ self.assertEqual(201, response.status_code)
+ self.assertEqual(
+ 'johndoe',
+ json.loads(response.content).get('username')
+ )
self.assertEqual(1, len(User.objects.values()))
self.assertEqual('johndoe', User.objects.values()[0]['username'])
|
f176051094b5482f48781f0695835fed5727742c
|
src/webassets/filter/uglifyjs.py
|
src/webassets/filter/uglifyjs.py
|
"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifySFilter',)
class UglifySFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
def output(self, _in, out, **kw):
args = [self.binary]
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
"""Minify Javascript using `UglifyJS <https://github.com/mishoo/UglifyJS/>`_.
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
from webassets.exceptions import FilterError
from webassets.filter import Filter
__all__ = ('UglifySFilter',)
class UglifySFilter(Filter):
name = 'uglifyjs'
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
require=False)
def output(self, _in, out, **kw):
args = [self.binary]
if self.extra_args:
args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, stderr = proc.communicate(_in.read())
if proc.returncode != 0:
raise FilterError(('uglifyjs: subprocess had error: stderr=%s, '+
'stdout=%s, returncode=%s') % (
stderr, stdout, proc.returncode))
out.write(stdout)
|
Allow UglifyJS to accept additional command-line arguments
|
Allow UglifyJS to accept additional command-line arguments
|
Python
|
bsd-2-clause
|
JDeuce/webassets,scorphus/webassets,heynemann/webassets,scorphus/webassets,aconrad/webassets,JDeuce/webassets,glorpen/webassets,wijerasa/webassets,john2x/webassets,heynemann/webassets,aconrad/webassets,heynemann/webassets,wijerasa/webassets,glorpen/webassets,aconrad/webassets,0x1997/webassets,florianjacob/webassets,john2x/webassets,florianjacob/webassets,glorpen/webassets,0x1997/webassets
|
---
+++
@@ -2,7 +2,8 @@
UglifyJS is an external tool written for NodeJS; this filter assumes that
the ``uglifyjs`` executable is in the path. Otherwise, you may define
-a ``UGLIFYJS_BIN`` setting.
+a ``UGLIFYJS_BIN`` setting. Additional options may be passed to ``uglifyjs``
+by setting ``UGLIFYJS_EXTRA_ARGS``, which expects a list of strings.
"""
import subprocess
@@ -20,9 +21,13 @@
def setup(self):
self.binary = self.get_config(
'UGLIFYJS_BIN', require=False) or 'uglifyjs'
+ self.extra_args = self.get_config('UGLIFYJS_EXTRA_ARGS',
+ require=False)
def output(self, _in, out, **kw):
args = [self.binary]
+ if self.extra_args:
+ args.extend(self.extra_args)
proc = subprocess.Popen(
args, stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
|
d97dd4a8f4c0581ce33ed5838dcc0329745041bf
|
pirate_add_shift_recurrence.py
|
pirate_add_shift_recurrence.py
|
#!/usr/bin/python
import sys
import os
from tasklib.task import TaskWarrior
time_attributes = ('wait', 'scheduled')
def is_new_local_recurrence_child_task(task):
# Do not affect tasks not spun by recurrence
if not task['parent']:
return False
# Newly created recurrence tasks actually have
# modified field copied from the parent, thus
# older than entry field (until their ID is generated)
if (task['modified'] - task['entry']).total_seconds() < 0:
return True
tw = TaskWarrior(data_location=os.path.dirname(os.path.dirname(sys.argv[0])))
tw.config.update(dict(recurrence="no"))
def hook_shift_recurrence(task):
if is_new_local_recurrence_child_task(task):
parent = tw.tasks.get(uuid=task['parent'])
parent_due_shift = task['due'] - parent['due']
for attr in time_attributes:
if parent[attr]:
task[attr] = parent[attr] + parent_due_shift
|
#!/usr/bin/python
import sys
import os
from tasklib import TaskWarrior
time_attributes = ('wait', 'scheduled')
def is_new_local_recurrence_child_task(task):
# Do not affect tasks not spun by recurrence
if not task['parent']:
return False
# Newly created recurrence tasks actually have
# modified field copied from the parent, thus
# older than entry field (until their ID is generated)
if (task['modified'] - task['entry']).total_seconds() < 0:
return True
tw = TaskWarrior(data_location=os.path.dirname(os.path.dirname(sys.argv[0])))
tw.overrides.update(dict(recurrence="no", hooks="no"))
def hook_shift_recurrence(task):
if is_new_local_recurrence_child_task(task):
parent = tw.tasks.get(uuid=task['parent'])
parent_due_shift = task['due'] - parent['due']
for attr in time_attributes:
if parent[attr]:
task[attr] = parent[attr] + parent_due_shift
|
Fix old style import and config overrides
|
Fix old style import and config overrides
|
Python
|
mit
|
tbabej/task.shift-recurrence
|
---
+++
@@ -2,7 +2,7 @@
import sys
import os
-from tasklib.task import TaskWarrior
+from tasklib import TaskWarrior
time_attributes = ('wait', 'scheduled')
@@ -18,7 +18,7 @@
return True
tw = TaskWarrior(data_location=os.path.dirname(os.path.dirname(sys.argv[0])))
-tw.config.update(dict(recurrence="no"))
+tw.overrides.update(dict(recurrence="no", hooks="no"))
def hook_shift_recurrence(task):
if is_new_local_recurrence_child_task(task):
|
c429abe7bee0461c8d2874ecb75093246565e58c
|
code/python/Gaussian.py
|
code/python/Gaussian.py
|
import numpy as np
class Gaussian:
"""
An object of this class is a 2D elliptical gaussian
"""
def __init__(self):
"""
Constructor sets up a standard gaussian
"""
self.xc, self.yc, self.mass, self.width, self.q, self.theta =\
0., 0., 1., 1., 1., 0.
self.cos_theta, self.sin_theta = np.cos(self.theta), np.sin(self.theta)
def evaluate(self, x, y):
"""
Evaluate the density.
"""
xx = (x - self.xc)*self.cos_theta + (y - self.yc)*self.sin_theta
yy = -(x - self.xc)*self.sin_theta + (y - self.yc)*self.cos_theta
rsq = self.q*xx**2 + yy**2/self.q
f = np.exp(-0.5*rsq/(self.width**2)**2)
f *= self.mass/(2.*np.pi*self.width**2)
return f
if __name__ == '__main__':
pass
|
import numpy as np
class Gaussian:
"""
An object of this class is a 2D elliptical gaussian
"""
def __init__(self):
"""
Constructor sets up a standard gaussian
"""
self.xc, self.yc, self.mass, self.width, self.q, self.theta =\
0., 0., 1., 1., 1., 0.
def evaluate(self, x, y):
"""
Evaluate the density.
"""
xx = (x - self.xc)*np.cos(self.theta) + (y - self.yc)*np.sin(self.theta)
yy = -(x - self.xc)*np.sin(self.theta) + (y - self.yc)*np.cos(self.theta)
rsq = self.q*xx**2 + yy**2/self.q
f = np.exp(-0.5*rsq/(self.width**2)**2)
f *= self.mass/(2.*np.pi*self.width**2)
return f
if __name__ == '__main__':
import matplotlib.pyplot as plt
# Set up cartesian coordinate grid
x = np.linspace(-5., 5., 1001)
[x, y] = np.meshgrid(x, x[::-1])
# Make a gaussian
gaussian = Gaussian()
gaussian.q = 0.5
gaussian.theta = 30.*np.pi/180.
f = gaussian.evaluate(x, y)
print(f.sum()*(x[0, 1] - x[0, 0])**2)
plt.imshow(f, interpolation='nearest')
plt.show()
|
Make an image of a gaussian
|
Make an image of a gaussian
|
Python
|
mit
|
eggplantbren/MogTrack
|
---
+++
@@ -10,19 +10,32 @@
"""
self.xc, self.yc, self.mass, self.width, self.q, self.theta =\
0., 0., 1., 1., 1., 0.
- self.cos_theta, self.sin_theta = np.cos(self.theta), np.sin(self.theta)
def evaluate(self, x, y):
"""
Evaluate the density.
"""
- xx = (x - self.xc)*self.cos_theta + (y - self.yc)*self.sin_theta
- yy = -(x - self.xc)*self.sin_theta + (y - self.yc)*self.cos_theta
+ xx = (x - self.xc)*np.cos(self.theta) + (y - self.yc)*np.sin(self.theta)
+ yy = -(x - self.xc)*np.sin(self.theta) + (y - self.yc)*np.cos(self.theta)
rsq = self.q*xx**2 + yy**2/self.q
f = np.exp(-0.5*rsq/(self.width**2)**2)
f *= self.mass/(2.*np.pi*self.width**2)
return f
if __name__ == '__main__':
- pass
+ import matplotlib.pyplot as plt
+ # Set up cartesian coordinate grid
+ x = np.linspace(-5., 5., 1001)
+ [x, y] = np.meshgrid(x, x[::-1])
+
+ # Make a gaussian
+ gaussian = Gaussian()
+ gaussian.q = 0.5
+ gaussian.theta = 30.*np.pi/180.
+ f = gaussian.evaluate(x, y)
+ print(f.sum()*(x[0, 1] - x[0, 0])**2)
+
+ plt.imshow(f, interpolation='nearest')
+ plt.show()
+
|
bce7111c2b927290e054dffb765468c41b785947
|
bonspy/tests/test_features.py
|
bonspy/tests/test_features.py
|
from bonspy.features import _apply_operations
def test_apply_operations_domain():
value = _apply_operations('domain', 'www.test.com')
assert value == 'test.com'
def test_apply_operations_segment():
value = _apply_operations('segment', 1)
assert value == 1
|
from bonspy.features import _apply_operations
def test_apply_operations_domain():
value = _apply_operations('domain', 'www.test.com')
assert value == 'test.com'
def test_apply_operations_other_feature():
value = _apply_operations('other_feature', 'www.test.com')
assert value == 'www.test.com'
def test_apply_operations_segment():
value = _apply_operations('segment', 1)
assert value == 1
|
Test that stripping leading www is specific to domain feature
|
Test that stripping leading www is specific to domain feature
|
Python
|
bsd-3-clause
|
markovianhq/bonspy
|
---
+++
@@ -7,6 +7,12 @@
assert value == 'test.com'
+def test_apply_operations_other_feature():
+ value = _apply_operations('other_feature', 'www.test.com')
+
+ assert value == 'www.test.com'
+
+
def test_apply_operations_segment():
value = _apply_operations('segment', 1)
|
077ee72d4febbfa336cf65f92225f0bae350febf
|
__openerp__.py
|
__openerp__.py
|
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.1",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
"version": "1.2.2",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
separate transactions.''',
'website': 'http://www.openerp-experts.com',
'init_xml': [],
"depends": [
'base',
'account_streamline',
],
"data": [
'security/ir.model.access.csv',
'views/account_view.xml',
'views/ledger_type.xml',
'views/menu.xml',
'views/account_journal.xml',
],
'js': [
'static/src/js/account_move_line_alternate_quickadd.js',
],
'qweb': [
'static/src/xml/account_move_line_alternate_quickadd.xml',
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Change version to 1.2.2 (stable)
|
Change version to 1.2.2 (stable)
|
Python
|
agpl-3.0
|
xcgd/alternate_ledger,xcgd/alternate_ledger
|
---
+++
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
{
"name": "Alternate Ledger",
- "version": "1.2.1",
+ "version": "1.2.2",
"author": "XCG Consulting",
"category": 'Accounting',
"description": '''Allow the creation of new accounting ledgers that store
|
9340b43508c4203c81e3feb9607c8a7fe5972eb5
|
tools/skp/page_sets/skia_intelwiki_desktop.py
|
tools/skp/page_sets/skia_intelwiki_desktop.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(30)
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology#Capabilities_(GPU_hardware)',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry import story
from telemetry.page import page as page_module
from telemetry.page import shared_page_state
class SkiaBuildbotDesktopPage(page_module.Page):
def __init__(self, url, page_set):
super(SkiaBuildbotDesktopPage, self).__init__(
url=url,
name=url,
page_set=page_set,
shared_page_state_class=shared_page_state.SharedDesktopPageState)
self.archive_data_file = 'data/skia_intelwiki_desktop.json'
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
action_runner.Wait(120)
class SkiaIntelwikiDesktopPageSet(story.StorySet):
""" Pages designed to represent the median, not highly optimized web """
def __init__(self):
super(SkiaIntelwikiDesktopPageSet, self).__init__(
archive_data_file='data/skia_intelwiki_desktop.json')
urls_list = [
# go/skia-skps-3-19
'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
self.AddStory(SkiaBuildbotDesktopPage(url, self))
|
Remove anchor and increase wait time for desk_intelwiki.skp
|
Remove anchor and increase wait time for desk_intelwiki.skp
No-Try: true
Bug: skia:11804
Change-Id: Ib30df7f233bd3c2bcbfdf5c62e803be187a4ff01
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/389712
Commit-Queue: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Reviewed-by: Robert Phillips <95e8ac5504948c7bf91b22c16a8dbb7ae7c66bfd@google.com>
|
Python
|
bsd-3-clause
|
google/skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,google/skia,google/skia,aosp-mirror/platform_external_skia,google/skia
|
---
+++
@@ -21,7 +21,7 @@
def RunNavigateSteps(self, action_runner):
action_runner.Navigate(self.url)
- action_runner.Wait(30)
+ action_runner.Wait(120)
class SkiaIntelwikiDesktopPageSet(story.StorySet):
@@ -34,7 +34,7 @@
urls_list = [
# go/skia-skps-3-19
- 'https://en.wikipedia.org/wiki/Intel_Graphics_Technology#Capabilities_(GPU_hardware)',
+ 'https://en.wikipedia.org/wiki/Intel_Graphics_Technology',
]
for url in urls_list:
|
d29fd721988fc9a75891a636afece63090f46295
|
taiga/projects/references/api.py
|
taiga/projects/references/api.py
|
# -*- coding: utf-8 -*-
from django.db.models.loading import get_model
from django.shortcuts import get_object_or_404
from rest_framework.response import Response
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from taiga.base import exceptions as exc
from .serializers import ResolverSerializer
class ResolverViewSet(viewsets.ViewSet):
permission_classes = (IsAuthenticated,)
def list(self, request, **kwargs):
serializer = ResolverSerializer(data=request.QUERY_PARAMS)
if not serializer.is_valid():
raise exc.BadRequest(serializer.errors)
data = serializer.data
project_model = get_model("projects", "Project")
project = get_object_or_404(project_model, slug=data["project"])
result = {
"project": project.pk
}
if data["us"]:
result["us"] = get_object_or_404(project.user_stories.all(), ref=data["us"]).pk
if data["task"]:
result["us"] = get_object_or_404(project.tasks.all(), ref=data["task"]).pk
if data["issue"]:
result["issue"] = get_object_or_404(project.issues.all(), ref=data["issue"]).pk
if data["milestone"]:
result["milestone"] = get_object_or_404(project.milestones.all(), slug=data["milestones"]).pk
return Response(result)
|
# -*- coding: utf-8 -*-
from django.db.models.loading import get_model
from django.shortcuts import get_object_or_404
from rest_framework.response import Response
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from taiga.base import exceptions as exc
from .serializers import ResolverSerializer
class ResolverViewSet(viewsets.ViewSet):
permission_classes = (IsAuthenticated,)
def list(self, request, **kwargs):
serializer = ResolverSerializer(data=request.QUERY_PARAMS)
if not serializer.is_valid():
raise exc.BadRequest(serializer.errors)
data = serializer.data
project_model = get_model("projects", "Project")
project = get_object_or_404(project_model, slug=data["project"])
result = {
"project": project.pk
}
if data["us"]:
result["us"] = get_object_or_404(project.user_stories.all(), ref=data["us"]).pk
if data["task"]:
result["us"] = get_object_or_404(project.tasks.all(), ref=data["task"]).pk
if data["issue"]:
result["issue"] = get_object_or_404(project.issues.all(), ref=data["issue"]).pk
if data["milestone"]:
result["milestone"] = get_object_or_404(project.milestones.all(), slug=data["milestone"]).pk
return Response(result)
|
Fix typo that raises KeyError in taskboard
|
Fix typo that raises KeyError in taskboard
|
Python
|
agpl-3.0
|
Tigerwhit4/taiga-back,bdang2012/taiga-back-casting,seanchen/taiga-back,gam-phon/taiga-back,bdang2012/taiga-back-casting,EvgeneOskin/taiga-back,Tigerwhit4/taiga-back,CoolCloud/taiga-back,astagi/taiga-back,obimod/taiga-back,gauravjns/taiga-back,bdang2012/taiga-back-casting,xdevelsistemas/taiga-back-community,astagi/taiga-back,frt-arch/taiga-back,xdevelsistemas/taiga-back-community,gam-phon/taiga-back,taigaio/taiga-back,Zaneh-/bearded-tribble-back,Rademade/taiga-back,Tigerwhit4/taiga-back,19kestier/taiga-back,rajiteh/taiga-back,Tigerwhit4/taiga-back,coopsource/taiga-back,CoolCloud/taiga-back,xdevelsistemas/taiga-back-community,seanchen/taiga-back,joshisa/taiga-back,coopsource/taiga-back,dycodedev/taiga-back,CMLL/taiga-back,gauravjns/taiga-back,dayatz/taiga-back,19kestier/taiga-back,Rademade/taiga-back,crr0004/taiga-back,gauravjns/taiga-back,crr0004/taiga-back,Rademade/taiga-back,joshisa/taiga-back,jeffdwyatt/taiga-back,frt-arch/taiga-back,19kestier/taiga-back,CoolCloud/taiga-back,joshisa/taiga-back,CMLL/taiga-back,taigaio/taiga-back,joshisa/taiga-back,dayatz/taiga-back,EvgeneOskin/taiga-back,gauravjns/taiga-back,dycodedev/taiga-back,forging2012/taiga-back,bdang2012/taiga-back-casting,jeffdwyatt/taiga-back,forging2012/taiga-back,astronaut1712/taiga-back,WALR/taiga-back,jeffdwyatt/taiga-back,Zaneh-/bearded-tribble-back,frt-arch/taiga-back,WALR/taiga-back,astronaut1712/taiga-back,obimod/taiga-back,WALR/taiga-back,crr0004/taiga-back,Zaneh-/bearded-tribble-back,coopsource/taiga-back,crr0004/taiga-back,CoolCloud/taiga-back,astagi/taiga-back,EvgeneOskin/taiga-back,Rademade/taiga-back,dayatz/taiga-back,gam-phon/taiga-back,astronaut1712/taiga-back,rajiteh/taiga-back,forging2012/taiga-back,rajiteh/taiga-back,obimod/taiga-back,CMLL/taiga-back,CMLL/taiga-back,forging2012/taiga-back,coopsource/taiga-back,astronaut1712/taiga-back,seanchen/taiga-back,obimod/taiga-back,EvgeneOskin/taiga-back,Rademade/taiga-back,dycodedev/taiga-back,dycodedev/taiga-back,WALR/taiga-back,taigaio/taiga-ba
ck,rajiteh/taiga-back,jeffdwyatt/taiga-back,astagi/taiga-back,seanchen/taiga-back,gam-phon/taiga-back
|
---
+++
@@ -35,6 +35,6 @@
if data["issue"]:
result["issue"] = get_object_or_404(project.issues.all(), ref=data["issue"]).pk
if data["milestone"]:
- result["milestone"] = get_object_or_404(project.milestones.all(), slug=data["milestones"]).pk
+ result["milestone"] = get_object_or_404(project.milestones.all(), slug=data["milestone"]).pk
return Response(result)
|
e5d16155364c1dae2db238506f236194e2dfb1dc
|
tripleo_common/filters/capabilities_filter.py
|
tripleo_common/filters/capabilities_filter.py
|
# Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.scheduler import filters
class TripleOCapabilitiesFilter(filters.BaseHostFilter):
"""Filter hosts based on capabilities in boot request
The standard Nova ComputeCapabilitiesFilter does not respect capabilities
requested in the scheduler_hints field, so we need a custom one in order
to be able to do predictable placement of nodes.
"""
# list of hosts doesn't change within a request
run_filter_once_per_request = True
def host_passes(self, host_state, spec_obj):
host_node = host_state.stats.get('node')
instance_node = spec_obj.scheduler_hints.get('capabilities:node')
# The instance didn't request a specific node
if not instance_node:
return True
return host_node == instance_node[0]
|
# Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.scheduler import filters
from oslo_log import log as logging
LOG = logging.getLogger(__name__)
class TripleOCapabilitiesFilter(filters.BaseHostFilter):
"""Filter hosts based on capabilities in boot request
The standard Nova ComputeCapabilitiesFilter does not respect capabilities
requested in the scheduler_hints field, so we need a custom one in order
to be able to do predictable placement of nodes.
"""
# list of hosts doesn't change within a request
run_filter_once_per_request = True
def host_passes(self, host_state, spec_obj):
host_node = host_state.stats.get('node')
instance_node = spec_obj.scheduler_hints.get('capabilities:node')
# The instance didn't request a specific node
if not instance_node:
LOG.debug('No specific node requested')
return True
if host_node == instance_node[0]:
LOG.debug('Node tagged %s matches requested node %s', host_node,
instance_node[0])
return True
else:
LOG.debug('Node tagged %s does not match requested node %s',
host_node, instance_node[0])
return False
|
Add logging to capabilities filter
|
Add logging to capabilities filter
The capabilities filter gets incorrectly blamed for a lot of
deployment failures because on a retry of a node deployment the
filter has to fail because there is only one node that can match
when using predictable placement. However, we don't have any
logging to help determine why the filter fails. This adds logging
to explain why nodes fell through, which will hopefully help with
debugging these problems.
Change-Id: I702209e9b5a1e546bf8cb784eec8bdc3c97c63eb
Closes-Bug: 1718502
|
Python
|
apache-2.0
|
openstack/tripleo-common,openstack/tripleo-common
|
---
+++
@@ -13,6 +13,10 @@
# under the License.
from nova.scheduler import filters
+from oslo_log import log as logging
+
+
+LOG = logging.getLogger(__name__)
class TripleOCapabilitiesFilter(filters.BaseHostFilter):
@@ -31,5 +35,13 @@
instance_node = spec_obj.scheduler_hints.get('capabilities:node')
# The instance didn't request a specific node
if not instance_node:
+ LOG.debug('No specific node requested')
return True
- return host_node == instance_node[0]
+ if host_node == instance_node[0]:
+ LOG.debug('Node tagged %s matches requested node %s', host_node,
+ instance_node[0])
+ return True
+ else:
+ LOG.debug('Node tagged %s does not match requested node %s',
+ host_node, instance_node[0])
+ return False
|
ad85d6495343d6089ede2bbf08540341ada93ca8
|
yolk/__init__.py
|
yolk/__init__.py
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8'
|
"""yolk.
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__version__ = '0.8.1'
|
Increment patch version to 0.8.1
|
Increment patch version to 0.8.1
|
Python
|
bsd-3-clause
|
myint/yolk,myint/yolk
|
---
+++
@@ -6,4 +6,4 @@
"""
-__version__ = '0.8'
+__version__ = '0.8.1'
|
1c5b70610a973ff90dab4253cb525acb7504d239
|
filer/tests/__init__.py
|
filer/tests/__init__.py
|
#-*- coding: utf-8 -*-
from filer.tests.admin import *
from filer.tests.fields import *
from filer.tests.models import *
from filer.tests.permissions import *
from filer.tests.server_backends import *
from filer.tests.tools import *
from filer.tests.utils import *
|
#-*- coding: utf-8 -*-
from filer.tests.admin import *
from filer.tests.models import *
from filer.tests.permissions import *
from filer.tests.server_backends import *
from filer.tests.tools import *
from filer.tests.utils import *
|
Remove field tests import as they no loger exists
|
Remove field tests import as they no loger exists
|
Python
|
bsd-3-clause
|
SmithsonianEnterprises/django-filer,jrief/django-filer,kriwil/django-filer,jakob-o/django-filer,samastur/django-filer,o-zander/django-filer,mkoistinen/django-filer,nimbis/django-filer,belimawr/django-filer,webu/django-filer,kriwil/django-filer,DylannCordel/django-filer,o-zander/django-filer,webu/django-filer,vechorko/django-filer,obigroup/django-filer,divio/django-filer,skirsdeda/django-filer,vechorko/django-filer,maykinmedia/django-filer,civicresourcegroup/django-filer,civicresourcegroup/django-filer,vstoykov/django-filer,thomasbilk/django-filer,maykinmedia/django-filer,divio/django-filer,jrief/django-filer,skirsdeda/django-filer,webu/django-filer,nimbis/django-filer,Flight/django-filer,fusionbox/django-filer,SmithsonianEnterprises/django-filer,divio/django-filer,Flight/django-filer,belimawr/django-filer,samastur/django-filer,alexandrupirjol/django-filer,sbussetti/django-filer,thomasbilk/django-filer,obigroup/django-filer,stefanfoulis/django-filer,DylannCordel/django-filer,SmithsonianEnterprises/django-filer,sopraux/django-filer,20tab/django-filer,matthiask/django-filer,DylannCordel/django-filer,maikelwever/django-filer,civicresourcegroup/django-filer,matthiask/django-filer,bogdal/django-filer,rollstudio/django-filer,sopraux/django-filer,thomasbilk/django-filer,samastur/django-filer,o-zander/django-filer,lory87/django-filer,nimbis/django-filer,mkoistinen/django-filer,vechorko/django-filer,dereknutile/django-filer,stefanfoulis/django-filer,jakob-o/django-filer,skirsdeda/django-filer,lory87/django-filer,sopraux/django-filer,bogdal/django-filer,fusionbox/django-filer,20tab/django-filer,sopraux/django-filer,mkoistinen/django-filer,nephila/django-filer,sbussetti/django-filer,matthiask/django-filer,Flight/django-filer,jrief/django-filer,alexandrupirjol/django-filer,stefanfoulis/django-filer,lory87/django-filer,nephila/django-filer,jakob-o/django-filer,dereknutile/django-filer,dubizzle/django-filer,jakob-o/django-filer,nephila/django-filer,jakob-o/django-filer,Flight/djan
go-filer,20tab/django-filer,fusionbox/django-filer,obigroup/django-filer,vstoykov/django-filer,belimawr/django-filer,belimawr/django-filer,webu/django-filer,maikelwever/django-filer,kriwil/django-filer,maykinmedia/django-filer,matthiask/django-filer,rollstudio/django-filer,o-zander/django-filer,Flight/django-filer,nimbis/django-filer,rollstudio/django-filer,dubizzle/django-filer,vechorko/django-filer,skirsdeda/django-filer,vstoykov/django-filer,lory87/django-filer,stefanfoulis/django-filer,dereknutile/django-filer,DylannCordel/django-filer,skirsdeda/django-filer,maikelwever/django-filer,civicresourcegroup/django-filer,kriwil/django-filer,sbussetti/django-filer,stefanfoulis/django-filer,DylannCordel/django-filer,divio/django-filer,dubizzle/django-filer,SmithsonianEnterprises/django-filer,bogdal/django-filer,mkoistinen/django-filer,alexandrupirjol/django-filer
|
---
+++
@@ -1,6 +1,5 @@
#-*- coding: utf-8 -*-
from filer.tests.admin import *
-from filer.tests.fields import *
from filer.tests.models import *
from filer.tests.permissions import *
from filer.tests.server_backends import *
|
018e76e5aa2a7ca8652af008a3b658017b3f178d
|
thefederation/tests/factories.py
|
thefederation/tests/factories.py
|
import factory
from django.utils.timezone import utc, now
from thefederation.models import Node, Platform, Protocol, Stat
class PlatformFactory(factory.DjangoModelFactory):
name = factory.Faker('word')
class Meta:
model = Platform
class ProtocolFactory(factory.DjangoModelFactory):
name = factory.Faker('word')
class Meta:
model = Protocol
class NodeFactory(factory.DjangoModelFactory):
host = factory.Sequence(lambda n: 'node%s.local' % n)
name = factory.Faker('company')
open_signups = factory.Faker('pybool')
platform = factory.SubFactory(PlatformFactory)
class Meta:
model = Node
class Params:
active = factory.Trait(
last_success = factory.Faker('past_datetime', start_date='-1d', tzinfo=utc),
)
@factory.post_generation
def protocols(self, create, extracted, **kwargs):
if not create:
return
if extracted:
self.protocols.add(extracted)
return
self.protocols.add(ProtocolFactory())
class StatFactory(factory.DjangoModelFactory):
date = now().date()
node = factory.SubFactory(NodeFactory)
users_total = factory.Faker('pyint')
users_half_year = factory.Faker('pyint')
users_monthly = factory.Faker('pyint')
users_weekly = factory.Faker('pyint')
class Meta:
model = Stat
|
import factory
from django.utils.timezone import utc, now
from thefederation.models import Node, Platform, Protocol, Stat
class PlatformFactory(factory.DjangoModelFactory):
name = factory.Faker('pystr')
class Meta:
model = Platform
class ProtocolFactory(factory.DjangoModelFactory):
name = factory.Faker('pystr')
class Meta:
model = Protocol
class NodeFactory(factory.DjangoModelFactory):
host = factory.Sequence(lambda n: 'node%s.local' % n)
name = factory.Faker('company')
open_signups = factory.Faker('pybool')
platform = factory.SubFactory(PlatformFactory)
class Meta:
model = Node
class Params:
active = factory.Trait(
last_success = factory.Faker('past_datetime', start_date='-1d', tzinfo=utc),
)
@factory.post_generation
def protocols(self, create, extracted, **kwargs):
if not create:
return
if extracted:
self.protocols.add(extracted)
return
self.protocols.add(ProtocolFactory())
class StatFactory(factory.DjangoModelFactory):
date = now().date()
node = factory.SubFactory(NodeFactory)
users_total = factory.Faker('pyint')
users_half_year = factory.Faker('pyint')
users_monthly = factory.Faker('pyint')
users_weekly = factory.Faker('pyint')
class Meta:
model = Stat
|
Make factory random names a bit more random to avoid clashes
|
Make factory random names a bit more random to avoid clashes
|
Python
|
agpl-3.0
|
jaywink/the-federation.info,jaywink/diaspora-hub,jaywink/diaspora-hub,jaywink/diaspora-hub,jaywink/the-federation.info,jaywink/the-federation.info
|
---
+++
@@ -5,14 +5,14 @@
class PlatformFactory(factory.DjangoModelFactory):
- name = factory.Faker('word')
+ name = factory.Faker('pystr')
class Meta:
model = Platform
class ProtocolFactory(factory.DjangoModelFactory):
- name = factory.Faker('word')
+ name = factory.Faker('pystr')
class Meta:
model = Protocol
|
524ee1cd2f56f6fe968f409d37cbd2af1621e7f3
|
framework/guid/model.py
|
framework/guid/model.py
|
from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField()
_meta = {
'optimistic': True,
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def _ensure_guid(self):
"""Create GUID record if current record doesn't already have one, then
point GUID to self.
"""
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
guid.referent = (guid._primary_key, self._name)
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
def save(self, *args, **kwargs):
""" Ensure GUID on save initialization. """
rv = super(GuidStoredObject, self).save(*args, **kwargs)
self._ensure_guid()
return rv
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
|
from framework import StoredObject, fields
class Guid(StoredObject):
_id = fields.StringField()
referent = fields.AbstractForeignField()
_meta = {
'optimistic': True,
}
class GuidStoredObject(StoredObject):
# Redirect to content using URL redirect by default
redirect_mode = 'redirect'
def _ensure_guid(self):
"""Create GUID record if current record doesn't already have one, then
point GUID to self.
"""
# Create GUID with specified ID if ID provided
if self._primary_key:
# Done if GUID already exists
guid = Guid.load(self._primary_key)
if guid is not None:
return
# Create GUID
guid = Guid(
_id=self._primary_key,
referent=self
)
guid.save()
# Else create GUID optimistically
else:
# Create GUID
guid = Guid()
guid.save()
guid.referent = (guid._primary_key, self._name)
guid.save()
# Set primary key to GUID key
self._primary_key = guid._primary_key
def save(self, *args, **kwargs):
""" Ensure GUID on save initialization. """
self._ensure_guid()
return super(GuidStoredObject, self).save(*args, **kwargs)
@property
def annotations(self):
""" Get meta-data annotations associated with object. """
return self.metadata__annotated
|
Fix last commit: Must ensure GUID before saving so that PK is defined
|
Fix last commit: Must ensure GUID before saving so that PK is defined
|
Python
|
apache-2.0
|
zkraime/osf.io,emetsger/osf.io,RomanZWang/osf.io,chennan47/osf.io,TomHeatwole/osf.io,adlius/osf.io,cwisecarver/osf.io,petermalcolm/osf.io,mfraezz/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,samanehsan/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,felliott/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,jmcarp/osf.io,jeffreyliu3230/osf.io,sbt9uc/osf.io,Ghalko/osf.io,cwisecarver/osf.io,aaxelb/osf.io,pattisdr/osf.io,SSJohns/osf.io,billyhunt/osf.io,danielneis/osf.io,GageGaskins/osf.io,ZobairAlijan/osf.io,ckc6cz/osf.io,acshi/osf.io,cslzchen/osf.io,njantrania/osf.io,samchrisinger/osf.io,acshi/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,lamdnhan/osf.io,njantrania/osf.io,billyhunt/osf.io,zamattiac/osf.io,felliott/osf.io,rdhyee/osf.io,alexschiller/osf.io,mluo613/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,alexschiller/osf.io,crcresearch/osf.io,kwierman/osf.io,arpitar/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,abought/osf.io,mfraezz/osf.io,ckc6cz/osf.io,MerlinZhang/osf.io,fabianvf/osf.io,himanshuo/osf.io,mattclark/osf.io,mluo613/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,samchrisinger/osf.io,TomBaxter/osf.io,himanshuo/osf.io,mluo613/osf.io,zkraime/osf.io,KAsante95/osf.io,revanthkolli/osf.io,himanshuo/osf.io,barbour-em/osf.io,adlius/osf.io,caneruguz/osf.io,fabianvf/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,binoculars/osf.io,doublebits/osf.io,ckc6cz/osf.io,caseyrygt/osf.io,pattisdr/osf.io,barbour-em/osf.io,laurenrevere/osf.io,revanthkolli/osf.io,jeffreyliu3230/osf.io,barbour-em/osf.io,cldershem/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,danielneis/osf.io,jinluyuan/osf.io,danielneis/osf.io,chrisseto/osf.io,lyndsysimon/osf.io,jeffreyliu3230/osf.io,jinluyuan/osf.io,jeffreyliu3230/osf.io,chennan47/osf.io,ticklemepierce/osf.io,aaxelb/osf.io,mluke93/osf.io,samchrisinger/osf.io,SSJohns/osf.io,binoculars/osf.io,reinaH/osf.io,mluo613/osf.io,cosenal/osf.io,CenterForOpenScience/osf.io,Ghalko/osf.io,caneruguz/osf.io,brandonPurvis/osf.io,GageGaskins/os
f.io,DanielSBrown/osf.io,mfraezz/osf.io,bdyetton/prettychart,jinluyuan/osf.io,baylee-d/osf.io,icereval/osf.io,kushG/osf.io,reinaH/osf.io,jolene-esposito/osf.io,mattclark/osf.io,ckc6cz/osf.io,reinaH/osf.io,mattclark/osf.io,zachjanicki/osf.io,wearpants/osf.io,asanfilippo7/osf.io,cldershem/osf.io,cldershem/osf.io,laurenrevere/osf.io,lyndsysimon/osf.io,cslzchen/osf.io,zkraime/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,mluke93/osf.io,zamattiac/osf.io,GaryKriebel/osf.io,TomBaxter/osf.io,lamdnhan/osf.io,brianjgeiger/osf.io,dplorimer/osf,cosenal/osf.io,aaxelb/osf.io,erinspace/osf.io,kushG/osf.io,saradbowman/osf.io,chrisseto/osf.io,kushG/osf.io,asanfilippo7/osf.io,caseyrygt/osf.io,Ghalko/osf.io,hmoco/osf.io,acshi/osf.io,brandonPurvis/osf.io,MerlinZhang/osf.io,SSJohns/osf.io,AndrewSallans/osf.io,leb2dg/osf.io,ticklemepierce/osf.io,RomanZWang/osf.io,cslzchen/osf.io,wearpants/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,samanehsan/osf.io,kushG/osf.io,chrisseto/osf.io,binoculars/osf.io,arpitar/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,samanehsan/osf.io,dplorimer/osf,acshi/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,felliott/osf.io,amyshi188/osf.io,mluke93/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,dplorimer/osf,zkraime/osf.io,jolene-esposito/osf.io,rdhyee/osf.io,amyshi188/osf.io,doublebits/osf.io,lamdnhan/osf.io,mluo613/osf.io,Nesiehr/osf.io,brandonPurvis/osf.io,arpitar/osf.io,amyshi188/osf.io,jolene-esposito/osf.io,monikagrabowska/osf.io,zachjanicki/osf.io,petermalcolm/osf.io,cslzchen/osf.io,wearpants/osf.io,cldershem/osf.io,billyhunt/osf.io,njantrania/osf.io,DanielSBrown/osf.io,adlius/osf.io,jnayak1/osf.io,samanehsan/osf.io,felliott/osf.io,doublebits/osf.io,jolene-esposito/osf.io,crcresearch/osf.io,bdyetton/prettychart,bdyetton/prettychart,barbour-em/osf.io,lamdnhan/osf.io,Ghalko/osf.io,sbt9uc/osf.io,KAsante95/osf.io,sloria/osf.io,kch8qx/osf.io,erinspace/osf.io,zamattiac/osf.io,caneruguz/osf.io,kwierman/osf.io,
samchrisinger/osf.io,TomHeatwole/osf.io,emetsger/osf.io,alexschiller/osf.io,doublebits/osf.io,caseyrygt/osf.io,ticklemepierce/osf.io,Nesiehr/osf.io,jinluyuan/osf.io,cosenal/osf.io,haoyuchen1992/osf.io,jmcarp/osf.io,kch8qx/osf.io,HarryRybacki/osf.io,MerlinZhang/osf.io,icereval/osf.io,kwierman/osf.io,alexschiller/osf.io,adlius/osf.io,icereval/osf.io,leb2dg/osf.io,jmcarp/osf.io,kch8qx/osf.io,jnayak1/osf.io,hmoco/osf.io,fabianvf/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,kch8qx/osf.io,CenterForOpenScience/osf.io,HarryRybacki/osf.io,bdyetton/prettychart,dplorimer/osf,aaxelb/osf.io,njantrania/osf.io,billyhunt/osf.io,ZobairAlijan/osf.io,rdhyee/osf.io,SSJohns/osf.io,billyhunt/osf.io,chennan47/osf.io,GaryKriebel/osf.io,chrisseto/osf.io,crcresearch/osf.io,revanthkolli/osf.io,kch8qx/osf.io,jnayak1/osf.io,danielneis/osf.io,GageGaskins/osf.io,amyshi188/osf.io,RomanZWang/osf.io,abought/osf.io,lyndsysimon/osf.io,cosenal/osf.io,KAsante95/osf.io,hmoco/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,mluke93/osf.io,GaryKriebel/osf.io,GaryKriebel/osf.io,reinaH/osf.io,leb2dg/osf.io,caneruguz/osf.io,emetsger/osf.io,jmcarp/osf.io,leb2dg/osf.io,mfraezz/osf.io,haoyuchen1992/osf.io,brandonPurvis/osf.io,cwisecarver/osf.io,caseyrollins/osf.io,caseyrygt/osf.io,kwierman/osf.io,AndrewSallans/osf.io,haoyuchen1992/osf.io,KAsante95/osf.io,jnayak1/osf.io,arpitar/osf.io,zamattiac/osf.io,petermalcolm/osf.io,HarryRybacki/osf.io,emetsger/osf.io,RomanZWang/osf.io,sloria/osf.io,erinspace/osf.io,fabianvf/osf.io,sloria/osf.io,acshi/osf.io,caseyrollins/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,MerlinZhang/osf.io,himanshuo/osf.io,saradbowman/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,sbt9uc/osf.io,rdhyee/osf.io,abought/osf.io,HarryRybacki/osf.io,TomHeatwole/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,abought/osf.io,lyndsysimon/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,ticklemepierce/osf.io,revanthkolli/osf.io,KAsante95/osf.io
|
---
+++
@@ -50,9 +50,8 @@
def save(self, *args, **kwargs):
""" Ensure GUID on save initialization. """
- rv = super(GuidStoredObject, self).save(*args, **kwargs)
self._ensure_guid()
- return rv
+ return super(GuidStoredObject, self).save(*args, **kwargs)
@property
def annotations(self):
|
587c1490538c610cdd885667720d3ad27da7eb83
|
main.py
|
main.py
|
import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
error_message = 'Redditor does not exist or Shadowbanned'
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
if cakeday:
return render_template('result.html', redditor=username,
cakeday=cakeday)
return render_template('index.html', error_message=error_message)
if __name__ == '__main__':
app.run(debug=True)
|
import praw
import humanize
from datetime import datetime
from flask import Flask
from flask import request, render_template
from prawoauth2 import PrawOAuth2Mini
from settings import (app_key, app_secret, access_token, refresh_token,
user_agent, scopes)
reddit_client = praw.Reddit(user_agent=user_agent)
oauth_helper = PrawOAuth2Mini(reddit_client, app_key=app_key,
app_secret=app_secret,
access_token=access_token,
refresh_token=refresh_token, scopes=scopes)
app = Flask(__name__)
def get_cake_day(username):
redditor = reddit_client.get_redditor(username)
try:
created_on = datetime.utcfromtimestamp(redditor.created_utc)
except praw.errors.NotFound:
return False
oauth_helper.refresh()
return(humanize.naturalday(created_on))
@app.route('/')
def index():
error_message = 'Redditor does not exist or Shadowbanned'
username = request.values.get('username')
if not username:
return render_template('index.html')
cakeday = get_cake_day(username)
if cakeday:
return render_template('result.html', username=username,
cakeday=cakeday)
return render_template('index.html', error_message=error_message)
if __name__ == '__main__':
app.run(debug=True)
|
Use `username` instead of `redditor` in templates
|
Use `username` instead of `redditor` in templates
|
Python
|
mit
|
avinassh/kekday,avinassh/kekday
|
---
+++
@@ -34,7 +34,7 @@
return render_template('index.html')
cakeday = get_cake_day(username)
if cakeday:
- return render_template('result.html', redditor=username,
+ return render_template('result.html', username=username,
cakeday=cakeday)
return render_template('index.html', error_message=error_message)
|
3992e424169a9ac6eb0d13c03045139403dc27cf
|
main.py
|
main.py
|
import hashlib
import models
import os
import os.path
import peewee
def init():
models.db.connect()
models.db.create_tables([models.Entry])
def digest(file_path):
h = hashlib.sha1()
file = open(file_path, 'rb')
buf = file.read(8192)
while len(buf) > 0:
h.update(buf)
buf = file.read(8192)
return h.hexdigest()
def traverse(path):
path = os.path.abspath(path)
for (dir_path, dirs, files) in os.walk(path):
buf = []
for file_name in sorted(files):
file_path = os.path.join(dir_path, file_name)
entry = {
'path': file_path,
'size': os.path.getsize(file_path),
'last_modified': os.path.getmtime(file_path),
'hash_str': digest(file_path)
}
buf.append(entry)
if len(buf) >= 256:
print('Writing chunks until', file_name)
models.Entry.insert_many(buf).execute()
buf.clear()
def reduce():
from models import Entry
from peewee import fn, SQL
duplicates = Entry
.select(Entry.hash_str, fn.COUNT(Entry.hash_str).alias('occurrence'))
.group_by(Entry.hash_str).having(SQL('occurrence') > 1)
return duplicates
|
import hashlib
import models
import os
import os.path
def init():
models.db.connect()
models.db.create_tables([models.Entry])
def digest(file_path):
    """Return the SHA-1 hex digest of the file at *file_path*.

    Reads the file in 8 KiB chunks so arbitrarily large files can be
    hashed without loading them into memory.
    """
    h = hashlib.sha1()
    # Context manager ensures the handle is closed even if a read fails
    # (the original leaked the file object).
    with open(file_path, 'rb') as file:
        buf = file.read(8192)
        while len(buf) > 0:
            h.update(buf)
            buf = file.read(8192)
    return h.hexdigest()
def traverse(path):
    """Walk *path* recursively and insert an Entry row for every file.

    Rows are batched and written 256 at a time to keep each insert_many
    statement a reasonable size.
    """
    path = os.path.abspath(path)
    for (dir_path, dirs, files) in os.walk(path):
        buf = []
        for file_name in sorted(files):
            file_path = os.path.join(dir_path, file_name)
            entry = {
                'path': file_path,
                'size': os.path.getsize(file_path),
                'last_modified': os.path.getmtime(file_path),
                'hash_str': digest(file_path)
            }
            buf.append(entry)
            if len(buf) >= 256:
                print('Writing chunks until', file_name)
                models.Entry.insert_many(buf).execute()
                buf.clear()
        # Flush the partial batch; without this, up to 255 trailing
        # files per directory were silently never written.
        if buf:
            models.Entry.insert_many(buf).execute()
def reduce():
    """Return a peewee query of hash values that occur more than once.

    NOTE: the name shadows the builtin reduce(); kept unchanged for
    interface stability with existing callers.
    """
    # Imported lazily so module import does not require a DB connection.
    from models import Entry
    from peewee import fn, SQL
    duplicates = (Entry
        .select(Entry.hash_str, fn.COUNT(Entry.hash_str).alias('occurrence'))
        .group_by(Entry.hash_str)
        .having(SQL('occurrence') > 1))
    return duplicates
|
Fix import and indent issue
|
Fix import and indent issue
|
Python
|
mit
|
rschiang/pineapple.py
|
---
+++
@@ -2,7 +2,6 @@
import models
import os
import os.path
-import peewee
def init():
models.db.connect()
@@ -40,7 +39,8 @@
def reduce():
from models import Entry
from peewee import fn, SQL
- duplicates = Entry
+ duplicates = (Entry
.select(Entry.hash_str, fn.COUNT(Entry.hash_str).alias('occurrence'))
- .group_by(Entry.hash_str).having(SQL('occurrence') > 1)
+ .group_by(Entry.hash_str)
+ .having(SQL('occurrence') > 1))
return duplicates
|
2567e56c7c17754e18346b21bcad6eab713276ea
|
googlebot/middleware.py
|
googlebot/middleware.py
|
import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
"""
Middleware to automatically log in the Googlebot with the user account 'googlebot'
"""
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
if 'Googlebot' in request.META['HTTP_USER_AGENT']:
try:
remote_ip = request.META['REMOTE_ADDR']
hostname = socket.gethostbyaddr(remote_ip)[0]
if hostname.endswith('googlebot.com'):
request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
request.is_googlebot = True
else:
# FAKE googlebot!!!!
request.is_googlebot = False
except Exception, e:
pass # Don't bring down the site
return None
|
import socket
from django.contrib.auth.models import AnonymousUser
from django.contrib.auth.models import User
class GooglebotMiddleware(object):
    """
    Middleware to automatically log in the Googlebot with the user account 'googlebot'
    """
    def process_request(self, request):
        """Mark verified Googlebot requests and log them in as 'googlebot'.

        The User-Agent alone is not trusted: the remote IP must also
        reverse-resolve to a googlebot.com host.
        """
        request.is_googlebot = False # Assume false, until proven
        if request.user == AnonymousUser():
            # META may lack HTTP_USER_AGENT (e.g. some test clients).
            if request.META.get('HTTP_USER_AGENT'):
                if 'Googlebot' in request.META['HTTP_USER_AGENT']:
                    try:
                        remote_ip = request.META['REMOTE_ADDR']
                        hostname = socket.gethostbyaddr(remote_ip)[0]
                        if hostname.endswith('googlebot.com'):
                            # NOTE(review): get_or_create returns an
                            # (object, created) tuple, not a User --
                            # confirm this is intended before relying
                            # on request.user downstream.
                            request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
                            request.is_googlebot = True
                        else:
                            # FAKE googlebot!!!!
                            request.is_googlebot = False
                    except Exception, e:
                        pass # Don't bring down the site
        return None
|
Check to see if request.META contains HTTP_USER_AGENT
|
Check to see if request.META contains HTTP_USER_AGENT
|
Python
|
bsd-3-clause
|
macropin/django-googlebot
|
---
+++
@@ -11,18 +11,19 @@
def process_request(self, request):
request.is_googlebot = False # Assume false, until proven
if request.user == AnonymousUser():
- if 'Googlebot' in request.META['HTTP_USER_AGENT']:
- try:
- remote_ip = request.META['REMOTE_ADDR']
- hostname = socket.gethostbyaddr(remote_ip)[0]
+ if request.META.get('HTTP_USER_AGENT'):
+ if 'Googlebot' in request.META['HTTP_USER_AGENT']:
+ try:
+ remote_ip = request.META['REMOTE_ADDR']
+ hostname = socket.gethostbyaddr(remote_ip)[0]
- if hostname.endswith('googlebot.com'):
- request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
- request.is_googlebot = True
- else:
- # FAKE googlebot!!!!
- request.is_googlebot = False
+ if hostname.endswith('googlebot.com'):
+ request.user = User.objects.get_or_create(username='googlebot') # login our googlebot user :)
+ request.is_googlebot = True
+ else:
+ # FAKE googlebot!!!!
+ request.is_googlebot = False
- except Exception, e:
- pass # Don't bring down the site
+ except Exception, e:
+ pass # Don't bring down the site
return None
|
a270b7ea7636cd70b38f7e3534871a76ea2cdae1
|
rejected/example.py
|
rejected/example.py
|
"""Example Rejected Consumer"""
from rejected import consumer
import random
from tornado import gen
from tornado import httpclient
__version__ = '1.0.0'
class ExampleConsumer(consumer.SmartConsumer):
def process(self):
self.logger.info('Message: %r', self.body)
"""
action = random.randint(0, 100)
if action == 0:
raise consumer.ConsumerException('zomg')
elif action < 5:
raise consumer.MessageException('reject')
elif action < 10:
raise consumer.ProcessingException('publish')
"""
class AsyncExampleConsumer(consumer.Consumer):
@gen.coroutine
def process(self):
self.logger.info('Message: %r', self.body)
http_client = httpclient.AsyncHTTPClient()
results = yield [http_client.fetch('http://www.google.com'),
http_client.fetch('http://www.bing.com')]
self.logger.info('Length: %r', [len(r.body) for r in results])
|
"""Example Rejected Consumer"""
from rejected import consumer
import random
from tornado import gen
from tornado import httpclient
__version__ = '1.0.0'
class ExampleConsumer(consumer.SmartConsumer):
    """Synchronous demo consumer: logs the message body, then randomly
    raises each rejected exception type to exercise the error paths
    (action drawn from randint(0, 100), both ends inclusive)."""
    def process(self):
        self.logger.info('Message: %r', self.body)
        action = random.randint(0, 100)
        if action == 0:
            # ~1/101 chance: hard consumer failure.
            raise consumer.ConsumerException('zomg')
        elif action < 5:
            # actions 1-4: reject the message.
            raise consumer.MessageException('reject')
        elif action < 10:
            # actions 5-9: processing error (message republished).
            raise consumer.ProcessingException('publish')
class AsyncExampleConsumer(consumer.Consumer):
    """Asynchronous demo consumer: fetches two URLs concurrently with
    Tornado's AsyncHTTPClient and logs the response body lengths."""
    @gen.coroutine
    def process(self):
        self.logger.info('Message: %r', self.body)
        http_client = httpclient.AsyncHTTPClient()
        # Yielding a list runs both fetches concurrently.
        results = yield [http_client.fetch('http://www.google.com'),
                         http_client.fetch('http://www.bing.com')]
        self.logger.info('Length: %r', [len(r.body) for r in results])
|
Remove the commented out block
|
Remove the commented out block
|
Python
|
bsd-3-clause
|
gmr/rejected,gmr/rejected
|
---
+++
@@ -13,7 +13,6 @@
def process(self):
self.logger.info('Message: %r', self.body)
- """
action = random.randint(0, 100)
if action == 0:
raise consumer.ConsumerException('zomg')
@@ -21,9 +20,6 @@
raise consumer.MessageException('reject')
elif action < 10:
raise consumer.ProcessingException('publish')
-
- """
-
class AsyncExampleConsumer(consumer.Consumer):
|
5db17915435eb569bf7644019b9fdbf94f18114a
|
tests/conftest.py
|
tests/conftest.py
|
import StringIO
import PIL
import pytest
from django.core.files.uploadedfile import InMemoryUploadedFile
class Image:
def __init__(self):
self.dimensions = (100, 100)
self.color = 'blue'
self.name = 'image.jpg'
@property
def django_file(self):
# Create a new Django file-like object to be used in models as ImageField using
# InMemoryUploadedFile. If you look at the source in Django, a
# SimpleUploadedFile is essentially instantiated similarly to what is shown here
return InMemoryUploadedFile(self.image_file, None, self.name, 'image/jpeg',
self.image_file.len, None)
@property
def image_file(self):
# Create a file-like object to write thumb data (thumb data previously created
# using PIL, and stored in variable 'thumb')
image_io = StringIO.StringIO()
self.pil_image.save(image_io, format='JPEG')
image_io.seek(0)
return image_io
@property
def pil_image(self):
return PIL.Image.new('RGB', self.dimensions, self.color)
@pytest.fixture()
def image():
return Image()
|
import StringIO
import PIL
import pytest
from django.core.files.uploadedfile import InMemoryUploadedFile
class Image:
    """Test fixture helper: builds a solid-color in-memory JPEG and
    exposes it as a PIL image, a raw file-like object, and a Django
    upload file suitable for ImageField assignment."""
    def __init__(self):
        # Small dimensions keep JPEG encoding fast in tests.
        self.dimensions = (10, 10)
        self.color = 'blue'
        self.name = 'image.jpg'
    @property
    def django_file(self):
        # Create a new Django file-like object to be used in models as ImageField using
        # InMemoryUploadedFile. If you look at the source in Django, a
        # SimpleUploadedFile is essentially instantiated similarly to what is shown here
        return InMemoryUploadedFile(self.image_file, None, self.name, 'image/jpeg',
                                    self.image_file.len, None)
    @property
    def image_file(self):
        # Create a file-like object to write thumb data (thumb data previously created
        # using PIL, and stored in variable 'thumb')
        image_io = StringIO.StringIO()
        self.pil_image.save(image_io, format='JPEG')
        image_io.seek(0)
        return image_io
    @property
    def pil_image(self):
        # Fresh solid-color RGB image generated on every access.
        return PIL.Image.new('RGB', self.dimensions, self.color)
@pytest.fixture()
def image():
    """Provide a fresh Image helper for each test."""
    return Image()
|
Reduce size of testings images by default to speed up tests
|
Reduce size of testings images by default to speed up tests
|
Python
|
mit
|
saulshanabrook/django-simpleimages
|
---
+++
@@ -8,7 +8,7 @@
class Image:
def __init__(self):
- self.dimensions = (100, 100)
+ self.dimensions = (10, 10)
self.color = 'blue'
self.name = 'image.jpg'
|
35f59e256224b82c82b2be3af4cd22e43443bc9f
|
mgsv_names.py
|
mgsv_names.py
|
from __future__ import unicode_literals, print_function
import sqlite3, os, random
_select = 'select {} from {} order by random() limit 1'
_uncommon_select = 'select value from uncommons where key=?'
def generate_name():
conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db'))
cursor = conn.cursor()
adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0]
anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0]
rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0]
uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone()
uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone()
conn.close()
r = random.random()
if r < 0.001 or r >= 0.999:
return rare
elif r < 0.3 and uncommon_anim is not None:
return ' '.join((adj, uncommon_anim[0]))
elif r >= 0.7 and uncommon_adj is not None:
return ' '.join((uncommon_adj[0], anim))
return ' '.join((adj, anim))
if __name__ == '__main__':
print(generate_name())
|
from __future__ import unicode_literals, print_function
import sqlite3, os, random
_select = 'select {0} from {1} limit 1 offset abs(random()) % (select count({0}) from {1});'
_uncommon_select = 'select value from uncommons where key=?;'
def generate_name():
    """Return a randomly generated MGSV-style soldier name.

    Draws one adjective, one animal and one rare name from names.db,
    then chooses the combination by probability:
      - r < 0.001 or r >= 0.999: the rare name alone
      - r < 0.3 and a special pairing exists: adjective + uncommon animal
      - r >= 0.7 and a special pairing exists: uncommon adjective + animal
      - otherwise: plain adjective + animal
    """
    # Fresh connection per call keeps the function self-contained.
    conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db'))
    cursor = conn.cursor()
    adj = cursor.execute(_select.format('adjective', 'adjectives')).fetchone()[0]
    anim = cursor.execute(_select.format('animal', 'animals')).fetchone()[0]
    rare = cursor.execute(_select.format('name', 'rares')).fetchone()[0]
    # Special pairings keyed by the other word; None when no pair exists.
    uncommon_anim = cursor.execute(_uncommon_select, [adj]).fetchone()
    uncommon_adj = cursor.execute(_uncommon_select, [anim]).fetchone()
    conn.close()
    r = random.random()
    if r < 0.001 or r >= 0.999:
        return rare
    elif r < 0.3 and uncommon_anim is not None:
        return ' '.join((adj, uncommon_anim[0]))
    elif r >= 0.7 and uncommon_adj is not None:
        return ' '.join((uncommon_adj[0], anim))
    return ' '.join((adj, anim))
if __name__ == '__main__':
print(generate_name())
|
Update SQL for efficiency and semicolons.
|
Update SQL for efficiency and semicolons.
|
Python
|
unlicense
|
rotated8/mgsv_names
|
---
+++
@@ -1,8 +1,8 @@
from __future__ import unicode_literals, print_function
import sqlite3, os, random
-_select = 'select {} from {} order by random() limit 1'
-_uncommon_select = 'select value from uncommons where key=?'
+_select = 'select {0} from {1} limit 1 offset abs(random()) % (select count({0}) from {1});'
+_uncommon_select = 'select value from uncommons where key=?;'
def generate_name():
conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), 'names.db'))
|
1a5a5268cea83a7d29346a677e9d10ec9e5411e8
|
cuteshop/downloaders/git.py
|
cuteshop/downloaders/git.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
def _checkout(name):
with change_working_directory(DOWNLOAD_CONTAINER):
subprocess.call(
('git', 'checkout', name),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
def download(source_info):
url = source_info['git']
subprocess.call(
('git', 'clone', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
_checkout(source_info['tag'])
elif 'branch' in source_info:
_checkout(source_info['branch'])
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import subprocess
from ..utils import DEVNULL, change_working_directory
from .base import DOWNLOAD_CONTAINER
def _checkout(name):
    """Check out *name* (a tag or branch) inside the download directory.

    git output is discarded; failures are not raised to the caller,
    since subprocess.call only returns the exit status.
    """
    with change_working_directory(DOWNLOAD_CONTAINER):
        subprocess.call(
            ('git', 'checkout', name),
            stdout=DEVNULL, stderr=subprocess.STDOUT,
        )
def download(source_info):
    """Clone the git repository described by *source_info*.

    source_info must contain a 'git' URL; an optional 'tag' or 'branch'
    key selects the revision to check out ('tag' wins when both are
    present). Submodules are fetched as well via --recursive.
    """
    url = source_info['git']
    subprocess.call(
        ('git', 'clone', '--recursive', url, DOWNLOAD_CONTAINER),
        stdout=DEVNULL, stderr=subprocess.STDOUT,
    )
    if 'tag' in source_info:
        _checkout(source_info['tag'])
    elif 'branch' in source_info:
        _checkout(source_info['branch'])
|
Allow submodules in lib repo
|
Allow submodules in lib repo
|
Python
|
mit
|
uranusjr/cuteshop
|
---
+++
@@ -17,7 +17,7 @@
def download(source_info):
url = source_info['git']
subprocess.call(
- ('git', 'clone', url, DOWNLOAD_CONTAINER),
+ ('git', 'clone', '--recursive', url, DOWNLOAD_CONTAINER),
stdout=DEVNULL, stderr=subprocess.STDOUT,
)
if 'tag' in source_info:
|
6a9407d7cc4ac5555180a2ee331ff95eef131902
|
mitmproxy/platform/osx.py
|
mitmproxy/platform/osx.py
|
import subprocess
from . import pf
"""
Doing this the "right" way by using DIOCNATLOOK on the pf device turns out
to be a pain. Apple has made a number of modifications to the data
structures returned, and compiling userspace tools to test and work with
this turns out to be a pain in the ass. Parsing pfctl output is short,
simple, and works.
Note: Also Tested with FreeBSD 10 pkgng Python 2.7.x.
Should work almost exactly as on Mac OS X and except with some changes to
the output processing of pfctl (see pf.py).
"""
class Resolver(object):
STATECMD = ("sudo", "-n", "/sbin/pfctl", "-s", "state")
def original_addr(self, csock):
peer = csock.getpeername()
try:
stxt = subprocess.check_output(self.STATECMD, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
if "sudo: a password is required" in e.output:
insufficient_priv = True
else:
raise RuntimeError("Error getting pfctl state: " + repr(e))
else:
insufficient_priv = "sudo: a password is required" in stxt
if insufficient_priv:
raise RuntimeError(
"Insufficient privileges to access pfctl. "
"See http://docs.mitmproxy.org/en/latest/transparent/osx.html for details.")
return pf.lookup(peer[0], peer[1], stxt)
|
import subprocess
from . import pf
"""
Doing this the "right" way by using DIOCNATLOOK on the pf device turns out
to be a pain. Apple has made a number of modifications to the data
structures returned, and compiling userspace tools to test and work with
this turns out to be a pain in the ass. Parsing pfctl output is short,
simple, and works.
Note: Also Tested with FreeBSD 10 pkgng Python 2.7.x.
Should work almost exactly as on Mac OS X and except with some changes to
the output processing of pfctl (see pf.py).
"""
class Resolver(object):
    # Command used to dump the pf state table; "-n" makes sudo fail fast
    # instead of prompting for a password when run non-interactively.
    STATECMD = ("sudo", "-n", "/sbin/pfctl", "-s", "state")
    def original_addr(self, csock):
        """Return the pre-NAT (original) destination for the peer of *csock*.

        Runs ``pfctl -s state`` via sudo and parses its output with
        ``pf.lookup``. Raises RuntimeError when pfctl cannot be queried
        or sudo privileges are missing.
        """
        peer = csock.getpeername()
        try:
            stxt = subprocess.check_output(self.STATECMD, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            # check_output yields bytes; decode so the substring test
            # works on both Python 2 and 3.
            if "sudo: a password is required" in e.output.decode(errors="replace"):
                insufficient_priv = True
            else:
                raise RuntimeError("Error getting pfctl state: " + repr(e))
        else:
            insufficient_priv = "sudo: a password is required" in stxt.decode(errors="replace")
        if insufficient_priv:
            raise RuntimeError(
                "Insufficient privileges to access pfctl. "
                "See http://docs.mitmproxy.org/en/latest/transparent/osx.html for details.")
        return pf.lookup(peer[0], peer[1], stxt)
|
Make sudo pfctl error check Python 3 compatible
|
Make sudo pfctl error check Python 3 compatible
In Python 3, subprocess.check_output() returns a sequence of bytes. This change ensures that it will be converted to a string, so the substring test for the sudo error message does not raise a TypeError. This fixes the code in Python 3 while remaining compatible with Python 2.
|
Python
|
mit
|
mitmproxy/mitmproxy,cortesi/mitmproxy,MatthewShao/mitmproxy,MatthewShao/mitmproxy,laurmurclar/mitmproxy,mosajjal/mitmproxy,cortesi/mitmproxy,vhaupert/mitmproxy,mhils/mitmproxy,dwfreed/mitmproxy,ddworken/mitmproxy,gzzhanghao/mitmproxy,StevenVanAcker/mitmproxy,ujjwal96/mitmproxy,StevenVanAcker/mitmproxy,xaxa89/mitmproxy,laurmurclar/mitmproxy,Kriechi/mitmproxy,mitmproxy/mitmproxy,jvillacorta/mitmproxy,cortesi/mitmproxy,xaxa89/mitmproxy,gzzhanghao/mitmproxy,laurmurclar/mitmproxy,xaxa89/mitmproxy,jvillacorta/mitmproxy,vhaupert/mitmproxy,mhils/mitmproxy,gzzhanghao/mitmproxy,mhils/mitmproxy,mosajjal/mitmproxy,Kriechi/mitmproxy,cortesi/mitmproxy,jvillacorta/mitmproxy,MatthewShao/mitmproxy,ujjwal96/mitmproxy,gzzhanghao/mitmproxy,StevenVanAcker/mitmproxy,ddworken/mitmproxy,vhaupert/mitmproxy,dwfreed/mitmproxy,dwfreed/mitmproxy,dufferzafar/mitmproxy,mosajjal/mitmproxy,xaxa89/mitmproxy,Kriechi/mitmproxy,dufferzafar/mitmproxy,vhaupert/mitmproxy,mosajjal/mitmproxy,Kriechi/mitmproxy,laurmurclar/mitmproxy,dwfreed/mitmproxy,zlorb/mitmproxy,StevenVanAcker/mitmproxy,jvillacorta/mitmproxy,mitmproxy/mitmproxy,ddworken/mitmproxy,zlorb/mitmproxy,mhils/mitmproxy,mitmproxy/mitmproxy,dufferzafar/mitmproxy,ujjwal96/mitmproxy,dufferzafar/mitmproxy,mhils/mitmproxy,MatthewShao/mitmproxy,zlorb/mitmproxy,ddworken/mitmproxy,mitmproxy/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy
|
---
+++
@@ -23,12 +23,12 @@
try:
stxt = subprocess.check_output(self.STATECMD, stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
- if "sudo: a password is required" in e.output:
+ if "sudo: a password is required" in e.output.decode(errors="replace"):
insufficient_priv = True
else:
raise RuntimeError("Error getting pfctl state: " + repr(e))
else:
- insufficient_priv = "sudo: a password is required" in stxt
+ insufficient_priv = "sudo: a password is required" in stxt.decode(errors="replace")
if insufficient_priv:
raise RuntimeError(
|
f7f6a8a1b1f019b45b9f3c3c9c6124469a335798
|
phildb_client/__init__.py
|
phildb_client/__init__.py
|
from client import PhilDBClient
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
from phildb_client.client import PhilDBClient
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
Make import of client module explicit
|
Make import of client module explicit
|
Python
|
bsd-3-clause
|
amacd31/phildb_client
|
---
+++
@@ -1,4 +1,4 @@
-from client import PhilDBClient
+from phildb_client.client import PhilDBClient
from ._version import get_versions
__version__ = get_versions()['version']
|
9f17fc03a79434b3d92e4dea00ea33567c806280
|
runner/update_manifest.py
|
runner/update_manifest.py
|
import json
import os
import sys
here = os.path.abspath(os.path.split(__file__)[0])
root = os.path.abspath(os.path.join(here, "..", ".."))
sys.path.insert(0, os.path.abspath(os.path.join(here, "..", "scripts")))
import manifest
def main(request, response):
manifest_path = os.path.join(root, "MANIFEST.json")
manifest.update_manifest(root, **{"rebuild": False,
"local_changes": True,
"path": manifest_path})
return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
|
import json
import os
import sys
here = os.path.abspath(os.path.split(__file__)[0])
root = os.path.abspath(os.path.join(here, "..", ".."))
sys.path.insert(0, os.path.abspath(os.path.join(here, "..", "scripts")))
import manifest
def main(request, response):
    """wptserve handler: refresh MANIFEST.json on disk and return a JSON
    payload pointing at its URL."""
    path = os.path.join(root, "MANIFEST.json")
    # Load the existing manifest, fold in on-disk changes, then persist.
    manifest_file = manifest.load(path)
    manifest.update(root, "/", manifest_file)
    manifest.write(manifest_file, path)
    return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
|
Update test runner for changes in the manifest API.
|
Update test runner for changes in the manifest API.
|
Python
|
bsd-3-clause
|
frewsxcv/wpt-tools,wpt-on-tv-tf/wpt-tools,wpt-on-tv-tf/wpt-tools,frewsxcv/wpt-tools,kaixinjxq/wpt-tools,UprootStaging/wpt-tools,UprootStaging/wpt-tools,wpt-on-tv-tf/wpt-tools,kaixinjxq/wpt-tools,vivliostyle/wpt-tools,UprootStaging/wpt-tools,frewsxcv/wpt-tools,vivliostyle/wpt-tools,kaixinjxq/wpt-tools,vivliostyle/wpt-tools
|
---
+++
@@ -10,9 +10,9 @@
import manifest
def main(request, response):
- manifest_path = os.path.join(root, "MANIFEST.json")
- manifest.update_manifest(root, **{"rebuild": False,
- "local_changes": True,
- "path": manifest_path})
+ path = os.path.join(root, "MANIFEST.json")
+ manifest_file = manifest.load(path)
+ manifest.update(root, "/", manifest_file)
+ manifest.write(manifest_file, path)
return [("Content-Type", "application/json")], json.dumps({"url": "/MANIFEST.json"})
|
6fab7a8170cbd993400b097478f328024c3f9247
|
ezdaemon/__init__.py
|
ezdaemon/__init__.py
|
"""Daemonize makes Unix-y daemons real easy. Just import daemonize.daemonize
and call the function before whatever you want the daemon to be. A couple
gotchas:
1) It will disconnect your python process from stdin and stdout, so any
print calls will not show up. This is because daemons are disconnected
from any controlling terminal.
2) Similarly, the working directory is changed to the root folder. This
is to prevent lockup in case any virtual volumes are unmounted. Just make
sure any IO uses the absolute path.
"""
from daemon import daemonize
if __name__ == "__main__":
assert daemonize
print "imported ok"
|
"""ezdaemon makes Unix-y daemons real easy. Just import ezdaemon.daemonize
and call it before whatever you want the daemon to be. A couple gotchas:
1. It will disconnect your python process from stdin and stdout, so any
print calls will not show up. This is because daemons are disconnected
from any controlling terminal.
2. Similarly, the working directory is changed to the root folder. This
is to prevent lockup in case any virtual volumes are unmounted. Just make
sure any IO uses the absolute path.
Example:
from ezdaemon import daemonize
def do_daemony_stuff():
# your code here
logfile = open('/some/absolute/path.log').read() # IO needs abspath
if __name__ == "__main__":
print "before daemon" # this will print
daemonize()
print "in daemon" # this will go to /dev/null (aka not print)
do_daemony_stuff()
"""
from daemon import daemonize
if __name__ == "__main__":
assert daemonize
print "ezdaemon.daemonize imported ok"
|
Make init docstring reflect README
|
Make init docstring reflect README
|
Python
|
mit
|
cjeffers/ezdaemon
|
---
+++
@@ -1,14 +1,26 @@
-"""Daemonize makes Unix-y daemons real easy. Just import daemonize.daemonize
-and call the function before whatever you want the daemon to be. A couple
-gotchas:
+"""ezdaemon makes Unix-y daemons real easy. Just import ezdaemon.daemonize
+and call it before whatever you want the daemon to be. A couple gotchas:
- 1) It will disconnect your python process from stdin and stdout, so any
- print calls will not show up. This is because daemons are disconnected
- from any controlling terminal.
+1. It will disconnect your python process from stdin and stdout, so any
+ print calls will not show up. This is because daemons are disconnected
+ from any controlling terminal.
+2. Similarly, the working directory is changed to the root folder. This
+ is to prevent lockup in case any virtual volumes are unmounted. Just make
+ sure any IO uses the absolute path.
- 2) Similarly, the working directory is changed to the root folder. This
- is to prevent lockup in case any virtual volumes are unmounted. Just make
- sure any IO uses the absolute path.
+Example:
+
+ from ezdaemon import daemonize
+
+ def do_daemony_stuff():
+ # your code here
+ logfile = open('/some/absolute/path.log').read() # IO needs abspath
+
+ if __name__ == "__main__":
+ print "before daemon" # this will print
+ daemonize()
+ print "in daemon" # this will go to /dev/null (aka not print)
+ do_daemony_stuff()
"""
from daemon import daemonize
@@ -16,4 +28,4 @@
if __name__ == "__main__":
assert daemonize
- print "imported ok"
+ print "ezdaemon.daemonize imported ok"
|
c0a41a602fb7fa2ef0a6472f8c6ca00a7acfc7f1
|
app/youtube.py
|
app/youtube.py
|
# Load data for videos in Youtube playlist
# https://www.youtube.com/playlist?list=PLy7eek8wTbV9OtrbY3CJo5mRWnhuwTen0
## TODO handle videos for multiple months
# all_videos dict contains videos by day.
import pafy
from datetime import datetime
# TODO import app key
# pafy.set_api_key(key)
print("Loading Youtube video playlist")
playlist = pafy.get_playlist("PLy7eek8wTbV9OtrbY3CJo5mRWnhuwTen0")
videos = playlist['items']
all_videos = {}
for vid in videos:
# Date is stored in video title
vid_date_str = vid['playlist_meta']['title']
vid_day = datetime.strptime(vid_date_str, "%Y-%m-%d")
year, month, day = vid_day.year, vid_day.month, vid_day.day
if not all_videos.get(year):
all_videos[year] = {}
if not all_videos[year].get(month):
all_videos[year][month] = {}
try:
all_videos[year][month][day] = vid['pafy']
except:
print("Cant add " + vid_title)
|
# Load data for videos in Youtube playlist
# Uses video title as date, formatted as 20151230
## TODO handle videos for multiple months
# all_videos dict contains videos by day.
import pafy
from datetime import datetime
# TODO import app key
# pafy.set_api_key(key)
print("Loading Youtube video playlist")
playlist = pafy.get_playlist("PLXSngHQzQiiI8DyrElGi_N_rv-8ToJvIT")
videos = playlist['items']
all_videos = {}
for vid in videos:
# Date is stored in video title
vid_date_str = vid['playlist_meta']['title']
vid_day = datetime.strptime(vid_date_str, "%Y%m%d")
year, month, day = vid_day.year, vid_day.month, vid_day.day
if not all_videos.get(year):
all_videos[year] = {}
if not all_videos[year].get(month):
all_videos[year][month] = {}
try:
all_videos[year][month][day] = vid['pafy']
except:
print("Cant add " + vid_date_str)
|
Change playlist to a new one.
|
Change playlist to a new one.
|
Python
|
mit
|
jasalt/weatherlapse,jasalt/weatherlapse,jasalt/tiea207-demo,jasalt/tiea207-demo
|
---
+++
@@ -1,5 +1,5 @@
# Load data for videos in Youtube playlist
-# https://www.youtube.com/playlist?list=PLy7eek8wTbV9OtrbY3CJo5mRWnhuwTen0
+# Uses video title as date, formatted as 20151230
## TODO handle videos for multiple months
# all_videos dict contains videos by day.
@@ -12,7 +12,7 @@
print("Loading Youtube video playlist")
-playlist = pafy.get_playlist("PLy7eek8wTbV9OtrbY3CJo5mRWnhuwTen0")
+playlist = pafy.get_playlist("PLXSngHQzQiiI8DyrElGi_N_rv-8ToJvIT")
videos = playlist['items']
all_videos = {}
@@ -20,7 +20,7 @@
# Date is stored in video title
vid_date_str = vid['playlist_meta']['title']
- vid_day = datetime.strptime(vid_date_str, "%Y-%m-%d")
+ vid_day = datetime.strptime(vid_date_str, "%Y%m%d")
year, month, day = vid_day.year, vid_day.month, vid_day.day
@@ -32,4 +32,4 @@
try:
all_videos[year][month][day] = vid['pafy']
except:
- print("Cant add " + vid_title)
+ print("Cant add " + vid_date_str)
|
b262d53e8347ea666cb5cd46bc9e19b7944cf7e6
|
core/data/DataWriter.py
|
core/data/DataWriter.py
|
"""
DataWriter.py
"""
from DataController import DataController
from DataReader import DataReader
from vtk import vtkMetaImageWriter
from vtk import vtkXMLImageDataWriter
class DataWriter(DataController):
"""
DataWriter writes an image data object to
disk using the provided format.
"""
def __init__(self):
super(DataWriter, self).__init__()
self.supportedExtensions = [DataReader.TypeMHD,
DataReader.TypeVTI]
def WriteToFile(self, imageData, exportFileName, fileType):
if fileType == DataReader.TypeMHD:
if not exportFileName.endswith(".mhd"):
exportFileName = exportFileName + ".mhd"
writer = vtkMetaImageWriter()
writer.SetFileName(exportFileName)
writer.SetInputData(imageData)
writer.Write()
elif fileType == DataReader.TypeVTI:
writer = vtkXMLImageDataWriter()
writer.SetFileName(exportFileName)
writer.SetInputData(imageData)
writer.Write()
else:
raise NotImplementedError("No writing support for type " + str(fileType))
|
"""
DataWriter.py
"""
from DataController import DataController
from DataReader import DataReader
from vtk import vtkMetaImageWriter
from vtk import vtkXMLImageDataWriter
class DataWriter(DataController):
	"""
	DataWriter writes an image data object to
	disk using the provided format.
	"""
	def __init__(self):
		super(DataWriter, self).__init__()
		# File types this writer can produce (constants from DataReader).
		self.supportedExtensions = [DataReader.TypeMHD,
			DataReader.TypeVTI,
			DataReader.TypeMHA]
	def WriteToFile(self, imageData, exportFileName, fileType):
		"""
		Write imageData to exportFileName using the vtk writer that
		matches fileType. Raises NotImplementedError for other types.
		"""
		if fileType == DataReader.TypeMHD:
			# MetaImage needs the .mhd extension to emit the header/raw pair.
			if not exportFileName.endswith(".mhd"):
				exportFileName = exportFileName + ".mhd"
			writer = vtkMetaImageWriter()
			writer.SetFileName(exportFileName)
			writer.SetInputData(imageData)
			writer.Write()
		elif fileType == DataReader.TypeVTI:
			writer = vtkXMLImageDataWriter()
			writer.SetFileName(exportFileName)
			writer.SetInputData(imageData)
			writer.Write()
		elif fileType == DataReader.TypeMHA:
			# .mha is a single-file MetaImage; same writer as .mhd.
			writer = vtkMetaImageWriter()
			writer.SetFileName(exportFileName)
			writer.SetInputData(imageData)
			writer.Write()
		else:
			raise NotImplementedError("No writing support for type " + str(fileType))
|
Build in support for writing mha files.
|
Build in support for writing mha files.
|
Python
|
mit
|
berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop
|
---
+++
@@ -17,7 +17,8 @@
super(DataWriter, self).__init__()
self.supportedExtensions = [DataReader.TypeMHD,
- DataReader.TypeVTI]
+ DataReader.TypeVTI,
+ DataReader.TypeMHA]
def WriteToFile(self, imageData, exportFileName, fileType):
if fileType == DataReader.TypeMHD:
@@ -32,5 +33,10 @@
writer.SetFileName(exportFileName)
writer.SetInputData(imageData)
writer.Write()
+ elif fileType == DataReader.TypeMHA:
+ writer = vtkMetaImageWriter()
+ writer.SetFileName(exportFileName)
+ writer.SetInputData(imageData)
+ writer.Write()
else:
raise NotImplementedError("No writing support for type " + str(fileType))
|
a806d55b7cb2c554357895ca441f30c906aa1fc1
|
application.py
|
application.py
|
from canis import siriusxm, spotify, oauth
def main():
try:
current = siriusxm.get_currently_playing('siriusxmu')
spotify_id = spotify.id_for_song(current)
print(current, spotify_id)
except Exception, e:
print "Error {}".format(e)
if __name__ == "__main__":
oauth.app.run()
main()
|
from time import sleep
from datetime import datetime
from canis import siriusxm, spotify, oauth
def main():
channels = ['siriusxmu', 'altnation']
while True:
if oauth.expiration > datetime.utcnow():
oauth.refresh()
for channel in channels:
try:
current = siriusxm.get_currently_playing(channel)
spotify_id = spotify.id_for_song(current)
print '{} - {}'.format(current, spotify_id)
except Exception, e:
print "Error {}".format(e)
sleep(60)
if __name__ == "__main__":
oauth.app.run()
main()
|
Restructure error handling a bit
|
Restructure error handling a bit
|
Python
|
mit
|
maxgoedjen/canis
|
---
+++
@@ -1,12 +1,20 @@
+from time import sleep
+from datetime import datetime
from canis import siriusxm, spotify, oauth
def main():
- try:
- current = siriusxm.get_currently_playing('siriusxmu')
- spotify_id = spotify.id_for_song(current)
- print(current, spotify_id)
- except Exception, e:
- print "Error {}".format(e)
+ channels = ['siriusxmu', 'altnation']
+ while True:
+ if oauth.expiration > datetime.utcnow():
+ oauth.refresh()
+ for channel in channels:
+ try:
+ current = siriusxm.get_currently_playing(channel)
+ spotify_id = spotify.id_for_song(current)
+ print '{} - {}'.format(current, spotify_id)
+ except Exception, e:
+ print "Error {}".format(e)
+ sleep(60)
if __name__ == "__main__":
oauth.app.run()
|
cae43a00c1a9421194721601c0bebc3468f134e4
|
sekh/utils.py
|
sekh/utils.py
|
"""Utils for django-sekh"""
import re
from itertools import izip
def remove_duplicates(items):
"""
Remove duplicates elements in a list preserving the order.
"""
seen = {}
result = []
for item in items:
item = item.strip()
if not item or item in seen:
continue
seen[item] = True
result.append(item)
return result
def compile_terms(terms):
"""
Compile terms as regular expression,
for better matching.
"""
return [re.compile(re.escape(term), re.I | re.U)
for term in terms]
def list_range(x):
"""
Returns the range of a list.
"""
return max(x) - min(x)
def get_window(positions, indices):
"""
Given a list of lists and an index for each of those lists,
this returns a list of all of the corresponding values for those
lists and their respective index.
"""
return [word_positions[index] for
word_positions, index in
izip(positions, indices)]
def get_min_index(positions, window):
"""
Given a list of lists representing term positions in a corpus,
this returns the index of the min term, or nothing if None left.
"""
for min_index in [window.index(i) for i in sorted(window)]:
if window[min_index] < positions[min_index][-1]:
return min_index
return None
|
"""Utils for django-sekh"""
from future_builtins import zip
import re
def remove_duplicates(items):
"""
Remove duplicates elements in a list preserving the order.
"""
seen = {}
result = []
for item in items:
item = item.strip()
if not item or item in seen:
continue
seen[item] = True
result.append(item)
return result
def compile_terms(terms):
"""
Compile terms as regular expression,
for better matching.
"""
return [re.compile(re.escape(term), re.I | re.U)
for term in terms]
def list_range(x):
"""
Returns the range of a list.
"""
return max(x) - min(x)
def get_window(positions, indices):
"""
Given a list of lists and an index for each of those lists,
this returns a list of all of the corresponding values for those
lists and their respective index.
"""
return [word_positions[index] for
word_positions, index in
zip(positions, indices)]
def get_min_index(positions, window):
"""
Given a list of lists representing term positions in a corpus,
this returns the index of the min term, or nothing if None left.
"""
for min_index in [window.index(i) for i in sorted(window)]:
if window[min_index] < positions[min_index][-1]:
return min_index
return None
|
Use zip from future_builtins for Python 2 and 3 compatibility
|
Use zip from future_builtins for Python 2 and 3 compatibility
|
Python
|
bsd-3-clause
|
Fantomas42/django-sekh
|
---
+++
@@ -1,6 +1,7 @@
"""Utils for django-sekh"""
+from future_builtins import zip
+
import re
-from itertools import izip
def remove_duplicates(items):
@@ -42,7 +43,7 @@
"""
return [word_positions[index] for
word_positions, index in
- izip(positions, indices)]
+ zip(positions, indices)]
def get_min_index(positions, window):
|
fc6acce0667d23c0f0b51d67c5899cf979d37516
|
kindred/pycorenlp.py
|
kindred/pycorenlp.py
|
# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
useSessions = False
sessions = {}
def __init__(self, server_url):
self.server_url = server_url
if StanfordCoreNLP.useSessions:
if not server_url in StanfordCoreNLP.sessions:
StanfordCoreNLP.sessions[server_url] = requests.Session()
self.session = StanfordCoreNLP.sessions[server_url]
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string, received %s" % (str(type(text)))
assert isinstance(properties, dict)
#print('X',text)
data = text.encode('utf8')
if StanfordCoreNLP.useSessions:
r = self.session.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
else:
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
|
# Temporary inclusion of pycorenlp code for easier edits
# https://github.com/smilli/py-corenlp
import json, requests
import six
class StanfordCoreNLP:
def __init__(self, server_url):
self.server_url = server_url
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string, received %s" % (str(type(text)))
assert isinstance(properties, dict)
data = text.encode('utf8')
r = requests.post(
self.server_url, params={
'properties': str(properties)
}, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
return output
|
Remove experimental CoreNLP session code
|
Remove experimental CoreNLP session code
|
Python
|
mit
|
jakelever/kindred,jakelever/kindred
|
---
+++
@@ -6,33 +6,19 @@
class StanfordCoreNLP:
- useSessions = False
- sessions = {}
-
def __init__(self, server_url):
self.server_url = server_url
- if StanfordCoreNLP.useSessions:
- if not server_url in StanfordCoreNLP.sessions:
- StanfordCoreNLP.sessions[server_url] = requests.Session()
- self.session = StanfordCoreNLP.sessions[server_url]
def annotate(self, text, properties={}):
assert isinstance(text, six.string_types),"text must be a string, received %s" % (str(type(text)))
assert isinstance(properties, dict)
- #print('X',text)
data = text.encode('utf8')
- if StanfordCoreNLP.useSessions:
- r = self.session.post(
- self.server_url, params={
- 'properties': str(properties)
- }, data=data, headers={'Connection': 'close'})
- else:
- r = requests.post(
- self.server_url, params={
- 'properties': str(properties)
- }, data=data, headers={'Connection': 'close'})
+ r = requests.post(
+ self.server_url, params={
+ 'properties': str(properties)
+ }, data=data, headers={'Connection': 'close'})
assert 'outputFormat' in properties and properties['outputFormat'] == 'json'
output = json.loads(r.text, encoding='utf-8', strict=False)
|
3275d31861f9cccc623e7ae8c83198a48a75f82a
|
events/createMatchEvent.py
|
events/createMatchEvent.py
|
from common.log import logUtils as log
from constants import clientPackets
from constants import exceptions
from objects import glob
def handle(userToken, packetData):
try:
# get usertoken data
userID = userToken.userID
# Read packet data
packetData = clientPackets.createMatch(packetData)
# Create a match object
# TODO: Player number check
matchID = glob.matches.createMatch(packetData["matchName"], packetData["matchPassword"], packetData["beatmapID"], packetData["beatmapName"], packetData["beatmapMD5"], packetData["gameMode"], userID)
# Make sure the match has been created
if matchID not in glob.matches.matches:
raise exceptions.matchCreateError()
with glob.matches.matches[matchID] as match:
# Join that match
userToken.joinMatch(matchID)
# Give host to match creator
match.setHost(userID)
match.sendUpdates()
match.changePassword(packetData["matchPassword"])
except exceptions.matchCreateError:
log.error("Error while creating match!")
|
from common.log import logUtils as log
from constants import clientPackets, serverPackets
from constants import exceptions
from objects import glob
def handle(userToken, packetData):
try:
# get usertoken data
userID = userToken.userID
# Read packet data
packetData = clientPackets.createMatch(packetData)
# Make sure the name is valid
matchName = packetData["matchName"].strip()
if not matchName:
raise exceptions.matchCreateError()
# Create a match object
# TODO: Player number check
matchID = glob.matches.createMatch(matchName, packetData["matchPassword"].strip(), packetData["beatmapID"], packetData["beatmapName"], packetData["beatmapMD5"], packetData["gameMode"], userID)
# Make sure the match has been created
if matchID not in glob.matches.matches:
raise exceptions.matchCreateError()
with glob.matches.matches[matchID] as match:
# Join that match
userToken.joinMatch(matchID)
# Give host to match creator
match.setHost(userID)
match.sendUpdates()
match.changePassword(packetData["matchPassword"])
except exceptions.matchCreateError:
log.error("Error while creating match!")
userToken.enqueue(serverPackets.matchJoinFail())
|
Handle matches with no name
|
Handle matches with no name
|
Python
|
agpl-3.0
|
osuripple/pep.py,osuripple/pep.py
|
---
+++
@@ -1,5 +1,5 @@
from common.log import logUtils as log
-from constants import clientPackets
+from constants import clientPackets, serverPackets
from constants import exceptions
from objects import glob
@@ -12,9 +12,14 @@
# Read packet data
packetData = clientPackets.createMatch(packetData)
+ # Make sure the name is valid
+ matchName = packetData["matchName"].strip()
+ if not matchName:
+ raise exceptions.matchCreateError()
+
# Create a match object
# TODO: Player number check
- matchID = glob.matches.createMatch(packetData["matchName"], packetData["matchPassword"], packetData["beatmapID"], packetData["beatmapName"], packetData["beatmapMD5"], packetData["gameMode"], userID)
+ matchID = glob.matches.createMatch(matchName, packetData["matchPassword"].strip(), packetData["beatmapID"], packetData["beatmapName"], packetData["beatmapMD5"], packetData["gameMode"], userID)
# Make sure the match has been created
if matchID not in glob.matches.matches:
@@ -30,3 +35,4 @@
match.changePassword(packetData["matchPassword"])
except exceptions.matchCreateError:
log.error("Error while creating match!")
+ userToken.enqueue(serverPackets.matchJoinFail())
|
d04a0000d231b1a597992bd28ab4ab8de27667e2
|
cron/updateGameCache.py
|
cron/updateGameCache.py
|
import urllib2
urllib2.urlopen('http://www.gamingwithlemons.com/cron/update')
|
import urllib.request
urllib.request.urlopen('http://www.gamingwithlemons.com/cron/update')
|
Update cron job to use python3
|
Update cron job to use python3
|
Python
|
mit
|
rewphus/tidbitsdev,Clidus/gwl,rewphus/tidbitsdev,Clidus/gwl,rewphus/tidbitsdev,rewphus/tidbitsdev,Clidus/gwl,Clidus/gwl
|
---
+++
@@ -1,2 +1,2 @@
-import urllib2
-urllib2.urlopen('http://www.gamingwithlemons.com/cron/update')
+import urllib.request
+urllib.request.urlopen('http://www.gamingwithlemons.com/cron/update')
|
8e8545c024e307a4878cdb93a79b854afc84fad5
|
nyucal/cli.py
|
nyucal/cli.py
|
# -*- coding: utf-8 -*-
"""Console script for nyucal."""
import io
import click
from lxml import html
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("Replace this message by putting your code into "
"nyucal.cli.main")
click.echo("See click documentation at http://click.pocoo.org/")
@main.command()
def list(source=None):
if source is None:
source = "https://www.nyu.edu/registrar/calendars/university-academic-calendar.html?display=2" # noqa
text = requests.get(source).text
try:
store = nyucal.CalendarStore(io.StringIO(text))
for line in store.calendar_names:
click.echo(line)
except OSError:
# text is such a big string that python's stack trace was flooded out by
# emitting the string. So I checked on the exception's class name to find the
# error type. Then I guessed.
click.echo("Something is wrong")
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
"""Console script for nyucal.
See click documentation at http://click.pocoo.org/
"""
import io
import click
from nyucal import nyucal
import requests
@click.group()
def main(args=None):
"""Console script for nyucal."""
click.echo("cli for nyucal")
@main.command()
def list(source=None):
"""List the available calendars in the calendar source"""
if source is None:
source = nyucal.SOURCE_URL # noqa
store = nyucal.CalendarStore(source)
for line in store.calendar_names:
click.echo(line)
if __name__ == "__main__":
main()
|
Use the module variable for source URL
|
Use the module variable for source URL
|
Python
|
mit
|
nyumathclinic/nyucal,nyumathclinic/nyucal
|
---
+++
@@ -1,11 +1,14 @@
# -*- coding: utf-8 -*-
-"""Console script for nyucal."""
+"""Console script for nyucal.
+
+See click documentation at http://click.pocoo.org/
+"""
import io
import click
-from lxml import html
+
from nyucal import nyucal
import requests
@@ -13,25 +16,17 @@
@click.group()
def main(args=None):
"""Console script for nyucal."""
- click.echo("Replace this message by putting your code into "
- "nyucal.cli.main")
- click.echo("See click documentation at http://click.pocoo.org/")
+ click.echo("cli for nyucal")
@main.command()
def list(source=None):
+ """List the available calendars in the calendar source"""
if source is None:
- source = "https://www.nyu.edu/registrar/calendars/university-academic-calendar.html?display=2" # noqa
- text = requests.get(source).text
- try:
- store = nyucal.CalendarStore(io.StringIO(text))
- for line in store.calendar_names:
- click.echo(line)
- except OSError:
- # text is such a big string that python's stack trace was flooded out by
- # emitting the string. So I checked on the exception's class name to find the
- # error type. Then I guessed.
- click.echo("Something is wrong")
+ source = nyucal.SOURCE_URL # noqa
+ store = nyucal.CalendarStore(source)
+ for line in store.calendar_names:
+ click.echo(line)
if __name__ == "__main__":
|
a94aa2d9aa58a7c2df289588eb4f16d83725ce8f
|
numba/exttypes/tests/test_vtables.py
|
numba/exttypes/tests/test_vtables.py
|
__author__ = 'mark'
|
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import numba as nb
from numba import *
from numba.minivect.minitypes import FunctionType
from numba.exttypes import virtual
from numba.exttypes import ordering
from numba.exttypes import methodtable
from numba.exttypes.signatures import Method
from numba.testing.test_support import parametrize, main
class py_class(object):
pass
def myfunc1(a):
pass
def myfunc2(a, b):
pass
def myfunc3(a, b, c):
pass
types = list(nb.numeric) + [object_]
array_types = [t[:] for t in types]
array_types += [t[:, :] for t in types]
array_types += [t[:, :, :] for t in types]
all_types = types + array_types
def method(func, name, sig):
return Method(func, name, sig, False, False)
make_methods1 = lambda: [
method(myfunc1, 'method', FunctionType(argtype, [argtype]))
for argtype in all_types]
make_methods2 = lambda: [
method(myfunc2, 'method', FunctionType(argtype1, [argtype1, argtype2]))
for argtype1 in all_types
for argtype2 in all_types]
def make_table(methods):
table = methodtable.VTabType(py_class, [])
table.create_method_ordering()
for i, method in enumerate(make_methods1()):
key = method.name, method.signature.args
method.lfunc_pointer = i
table.specialized_methods[key] = method
assert len(methods) == len(table.specialized_methods)
return table
def make_hashtable(methods):
table = make_table(methods)
hashtable = virtual.build_hashing_vtab(table)
return hashtable
#------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------
@parametrize(make_methods1())
def test_specializations(methods):
hashtable = make_hashtable(methods)
print(hashtable)
for i, method in enumerate(methods):
key = virtual.sep201_signature_string(method.signature, method.name)
assert hashtable.find_method(key), (i, method, key)
if __name__ == '__main__':
main()
|
Add test for hash-based vtable creation
|
Add test for hash-based vtable creation
|
Python
|
bsd-2-clause
|
cpcloud/numba,ssarangi/numba,jriehl/numba,stuartarchibald/numba,pombredanne/numba,stefanseefeld/numba,stuartarchibald/numba,pitrou/numba,seibert/numba,cpcloud/numba,ssarangi/numba,IntelLabs/numba,IntelLabs/numba,sklam/numba,cpcloud/numba,seibert/numba,GaZ3ll3/numba,stonebig/numba,GaZ3ll3/numba,stonebig/numba,cpcloud/numba,IntelLabs/numba,stefanseefeld/numba,stefanseefeld/numba,gmarkall/numba,pombredanne/numba,stonebig/numba,shiquanwang/numba,pombredanne/numba,pitrou/numba,numba/numba,pitrou/numba,sklam/numba,IntelLabs/numba,gdementen/numba,gdementen/numba,numba/numba,gdementen/numba,stonebig/numba,stefanseefeld/numba,pitrou/numba,numba/numba,stonebig/numba,IntelLabs/numba,pitrou/numba,gmarkall/numba,gmarkall/numba,gdementen/numba,GaZ3ll3/numba,numba/numba,GaZ3ll3/numba,ssarangi/numba,pombredanne/numba,jriehl/numba,jriehl/numba,shiquanwang/numba,stuartarchibald/numba,gmarkall/numba,seibert/numba,stuartarchibald/numba,jriehl/numba,shiquanwang/numba,numba/numba,stefanseefeld/numba,jriehl/numba,sklam/numba,seibert/numba,seibert/numba,GaZ3ll3/numba,stuartarchibald/numba,cpcloud/numba,sklam/numba,pombredanne/numba,ssarangi/numba,gmarkall/numba,gdementen/numba,sklam/numba,ssarangi/numba
|
---
+++
@@ -1 +1,78 @@
-__author__ = 'mark'
+# -*- coding: utf-8 -*-
+from __future__ import print_function, division, absolute_import
+
+import numba as nb
+from numba import *
+from numba.minivect.minitypes import FunctionType
+from numba.exttypes import virtual
+from numba.exttypes import ordering
+from numba.exttypes import methodtable
+from numba.exttypes.signatures import Method
+from numba.testing.test_support import parametrize, main
+
+class py_class(object):
+ pass
+
+def myfunc1(a):
+ pass
+
+def myfunc2(a, b):
+ pass
+
+def myfunc3(a, b, c):
+ pass
+
+types = list(nb.numeric) + [object_]
+
+array_types = [t[:] for t in types]
+array_types += [t[:, :] for t in types]
+array_types += [t[:, :, :] for t in types]
+
+all_types = types + array_types
+
+def method(func, name, sig):
+ return Method(func, name, sig, False, False)
+
+make_methods1 = lambda: [
+ method(myfunc1, 'method', FunctionType(argtype, [argtype]))
+ for argtype in all_types]
+
+make_methods2 = lambda: [
+ method(myfunc2, 'method', FunctionType(argtype1, [argtype1, argtype2]))
+ for argtype1 in all_types
+ for argtype2 in all_types]
+
+
+def make_table(methods):
+ table = methodtable.VTabType(py_class, [])
+ table.create_method_ordering()
+
+ for i, method in enumerate(make_methods1()):
+ key = method.name, method.signature.args
+ method.lfunc_pointer = i
+ table.specialized_methods[key] = method
+
+ assert len(methods) == len(table.specialized_methods)
+
+ return table
+
+def make_hashtable(methods):
+ table = make_table(methods)
+ hashtable = virtual.build_hashing_vtab(table)
+ return hashtable
+
+#------------------------------------------------------------------------
+# Tests
+#------------------------------------------------------------------------
+
+@parametrize(make_methods1())
+def test_specializations(methods):
+ hashtable = make_hashtable(methods)
+ print(hashtable)
+
+ for i, method in enumerate(methods):
+ key = virtual.sep201_signature_string(method.signature, method.name)
+ assert hashtable.find_method(key), (i, method, key)
+
+if __name__ == '__main__':
+ main()
|
8e9de7c0df2f37c40d40b32612aae8e351c748b4
|
class4/exercise1.py
|
class4/exercise1.py
|
#!/usr/bin/python
from getpass import getpass
import time
import paramiko
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
password = getpass()
ssh_port = 8022
remote_conn_pre = paramiko.SSHClient()
remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
remote_conn_pre.connect(ip_addr, username=username, password=password, look_for_keys=False, allow_agent=False, port=ssh_port)
remote_conn = remote_conn_pre.invoke_shell()
output = remote_conn.recv(65535)
print output
outp = remote_conn.send("terminal length 0\n")
time.sleep(2)
outp = remote_conn.recv(65535)
print outp
remote_conn.send("show version\n")
time.sleep(2)
output = remote_conn.recv(65535)
print output
if __name__ == "__main__":
main()
|
# Use Paramiko to retrieve the entire 'show version' output from pynet-rtr2.
#!/usr/bin/python
from getpass import getpass
import time
import paramiko
def main():
ip_addr = '50.76.53.27'
username = 'pyclass'
password = getpass()
ssh_port = 8022
remote_conn_pre = paramiko.SSHClient()
remote_conn_pre.set_missing_host_key_policy(paramiko.AutoAddPolicy())
remote_conn_pre.connect(ip_addr, username=username, password=password, look_for_keys=False, allow_agent=False, port=ssh_port)
remote_conn = remote_conn_pre.invoke_shell()
output = remote_conn.recv(65535)
print output
outp = remote_conn.send("terminal length 0\n")
time.sleep(2)
outp = remote_conn.recv(65535)
print outp
remote_conn.send("show version\n")
time.sleep(2)
output = remote_conn.recv(65535)
print output
if __name__ == "__main__":
main()
|
Use Paramiko to retrieve the entire 'show version' output from pynet-rtr2.
|
Use Paramiko to retrieve the entire 'show version' output from pynet-rtr2.
|
Python
|
apache-2.0
|
linkdebian/pynet_course
|
---
+++
@@ -1,3 +1,5 @@
+# Use Paramiko to retrieve the entire 'show version' output from pynet-rtr2.
+
#!/usr/bin/python
from getpass import getpass
import time
|
81ca54adbfdb605cd63674134144e058c46bab5f
|
nalaf/features/embeddings.py
|
nalaf/features/embeddings.py
|
from nalaf.features import FeatureGenerator
from gensim.models import Word2Vec
class WordEmbeddingsFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
self.model = Word2Vec.load(model_file)
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word.lower() in self.model:
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
token.features['embedding_{}'.format(index)] = self.weight * value.item()
class BrownClusteringFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
with open(model_file, encoding='utf-8') as file:
self.clusters = {str(line.split()[1]): line.split()[0] for line in file.readlines()}
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word in self.clusters:
assignment = self.clusters[token.word]
for i in range(len(assignment)):
token.features['brown'] = assignment[:i+1]
|
from nalaf.features import FeatureGenerator
from gensim.models import Word2Vec
class WordEmbeddingsFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, additive=0, multiplicative=1):
self.model = Word2Vec.load(model_file)
self.additive = additive
self.multiplicative = multiplicative
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word.lower() in self.model:
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
token.features['embedding_{}'.format(index)] = (self.additive + value.item()) * self.multiplicative
class BrownClusteringFeatureGenerator(FeatureGenerator):
"""
DOCSTRING
"""
def __init__(self, model_file, weight=1):
with open(model_file, encoding='utf-8') as file:
self.clusters = {str(line.split()[1]): line.split()[0] for line in file.readlines()}
self.weight = weight
def generate(self, dataset):
"""
:type dataset: nalaf.structures.data.Dataset
"""
for token in dataset.tokens():
if token.word in self.clusters:
assignment = self.clusters[token.word]
for i in range(len(assignment)):
token.features['brown'] = assignment[:i+1]
|
Make WE use additive and multiplicative constants
|
Make WE use additive and multiplicative constants
|
Python
|
apache-2.0
|
Rostlab/nalaf
|
---
+++
@@ -7,9 +7,10 @@
DOCSTRING
"""
- def __init__(self, model_file, weight=1):
+ def __init__(self, model_file, additive=0, multiplicative=1):
self.model = Word2Vec.load(model_file)
- self.weight = weight
+ self.additive = additive
+ self.multiplicative = multiplicative
def generate(self, dataset):
"""
@@ -20,7 +21,7 @@
for index, value in enumerate(self.model[token.word.lower()]):
# value.item() since value is a numpy float
# and we want native python floats
- token.features['embedding_{}'.format(index)] = self.weight * value.item()
+ token.features['embedding_{}'.format(index)] = (self.additive + value.item()) * self.multiplicative
class BrownClusteringFeatureGenerator(FeatureGenerator):
|
984d8626a146770fe93d54ae107cd33dc3d2f481
|
dbmigrator/commands/init_schema_migrations.py
|
dbmigrator/commands/init_schema_migrations.py
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2015, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from .. import utils
__all__ = ('cli_loader',)
@utils.with_cursor
def cli_command(cursor, migrations_directory='', **kwargs):
cursor.execute("""\
CREATE TABLE IF NOT EXISTS schema_migrations (
version TEXT NOT NULL
)""")
cursor.execute("""\
DELETE FROM schema_migrations""")
versions = []
for version, name in utils.get_migrations(migrations_directory):
versions.append((version,))
cursor.executemany("""\
INSERT INTO schema_migrations VALUES (%s)
""", versions)
def cli_loader(parser):
return cli_command
|
# -*- coding: utf-8 -*-
# ###
# Copyright (c) 2015, Rice University
# This software is subject to the provisions of the GNU Affero General
# Public License version 3 (AGPLv3).
# See LICENCE.txt for details.
# ###
from .. import utils
__all__ = ('cli_loader',)
@utils.with_cursor
def cli_command(cursor, migrations_directory='', **kwargs):
cursor.execute("""\
CREATE TABLE IF NOT EXISTS schema_migrations (
version TEXT NOT NULL,
applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
)""")
cursor.execute("""\
DELETE FROM schema_migrations""")
versions = []
for version, name in utils.get_migrations(migrations_directory):
versions.append((version,))
cursor.executemany("""\
INSERT INTO schema_migrations VALUES (%s)
""", versions)
def cli_loader(parser):
return cli_command
|
Add "applied" timestamp to schema migrations table
|
Add "applied" timestamp to schema migrations table
|
Python
|
agpl-3.0
|
karenc/db-migrator
|
---
+++
@@ -16,7 +16,8 @@
def cli_command(cursor, migrations_directory='', **kwargs):
cursor.execute("""\
CREATE TABLE IF NOT EXISTS schema_migrations (
- version TEXT NOT NULL
+ version TEXT NOT NULL,
+ applied TIMESTAMP WITH TIME ZONE DEFAULT CURRENT_TIMESTAMP
)""")
cursor.execute("""\
DELETE FROM schema_migrations""")
|
8dbea15b789227d55972512307feb8f40f5d11a1
|
git_upstream_diff.py
|
git_upstream_diff.py
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import sys
import subprocess2
from git_common import current_branch, get_or_create_merge_base, config_list
from git_common import GIT_EXE
def main(args):
default_args = config_list('depot-tools.upstream-diff.default-args')
args = default_args + args
parser = argparse.ArgumentParser()
parser.add_argument('--wordwise', action='store_true', default=False,
help=(
'Print a colorized wordwise diff '
'instead of line-wise diff'))
opts, extra_args = parser.parse_known_args(args)
cmd = [GIT_EXE, 'diff', '--patience', '-C', '-C']
if opts.wordwise:
cmd += ['--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])']
cmd += [get_or_create_merge_base(current_branch())]
cmd += extra_args
subprocess2.check_call(cmd)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import sys
import subprocess2
import git_common as git
def main(args):
default_args = git.config_list('depot-tools.upstream-diff.default-args')
args = default_args + args
parser = argparse.ArgumentParser()
parser.add_argument('--wordwise', action='store_true', default=False,
help=(
'Print a colorized wordwise diff '
'instead of line-wise diff'))
opts, extra_args = parser.parse_known_args(args)
cur = git.current_branch()
if not cur or cur == 'HEAD':
print 'fatal: Cannot perform git-upstream-diff while not on a branch'
return 1
par = git.upstream(cur)
if not par:
print 'fatal: No upstream configured for branch \'%s\'' % cur
return 1
cmd = [git.GIT_EXE, 'diff', '--patience', '-C', '-C']
if opts.wordwise:
cmd += ['--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])']
cmd += [git.get_or_create_merge_base(cur, par)]
cmd += extra_args
subprocess2.check_call(cmd)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
Make udiff print reasonable errors while not on a branch.
|
Make udiff print reasonable errors while not on a branch.
R=agable@chromium.org
BUG=
Review URL: https://codereview.chromium.org/212493002
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@259647 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
Python
|
bsd-3-clause
|
svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools
|
---
+++
@@ -8,11 +8,10 @@
import subprocess2
-from git_common import current_branch, get_or_create_merge_base, config_list
-from git_common import GIT_EXE
+import git_common as git
def main(args):
- default_args = config_list('depot-tools.upstream-diff.default-args')
+ default_args = git.config_list('depot-tools.upstream-diff.default-args')
args = default_args + args
parser = argparse.ArgumentParser()
@@ -22,10 +21,20 @@
'instead of line-wise diff'))
opts, extra_args = parser.parse_known_args(args)
- cmd = [GIT_EXE, 'diff', '--patience', '-C', '-C']
+ cur = git.current_branch()
+ if not cur or cur == 'HEAD':
+ print 'fatal: Cannot perform git-upstream-diff while not on a branch'
+ return 1
+
+ par = git.upstream(cur)
+ if not par:
+ print 'fatal: No upstream configured for branch \'%s\'' % cur
+ return 1
+
+ cmd = [git.GIT_EXE, 'diff', '--patience', '-C', '-C']
if opts.wordwise:
cmd += ['--word-diff=color', r'--word-diff-regex=(\w+|[^[:space:]])']
- cmd += [get_or_create_merge_base(current_branch())]
+ cmd += [git.get_or_create_merge_base(cur, par)]
cmd += extra_args
|
2a285104807b07eba3682796536903254a175170
|
images_of/connect.py
|
images_of/connect.py
|
import praw
from images_of import settings
class Reddit(praw.Reddit):
def oauth(self, **kwargs):
self.set_oauth_app_info(
client_id = kwargs.get('client_id') or settings.CLIENT_ID,
client_secret = kwargs.get('client_secret') or settings.CLIENT_SECRET,
redirect_uri = kwargs.get('redirect_uri') or settings.REDIRECT_URI
)
self.refresh_access_information(
kwargs.get('refresh_token') or settings.REFRESH_TOKEN
)
def login(self, username=None, password=None):
# this is depricated, just ignore the warning.
super().login(
username or settings.USERNAME,
password or settings.PASSWORD,
disable_warning=True
)
|
import praw
from images_of import settings
class Reddit(praw.Reddit):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.config.api_request_delay = 1.0
def oauth(self, **kwargs):
self.set_oauth_app_info(
client_id = kwargs.get('client_id') or settings.CLIENT_ID,
client_secret = kwargs.get('client_secret') or settings.CLIENT_SECRET,
redirect_uri = kwargs.get('redirect_uri') or settings.REDIRECT_URI
)
self.refresh_access_information(
kwargs.get('refresh_token') or settings.REFRESH_TOKEN
)
def login(self, username=None, password=None):
# this is depricated, just ignore the warning.
self.config.api_request_delay = 2.0
super().login(
username or settings.USERNAME,
password or settings.PASSWORD,
disable_warning=True
)
|
Reduce oauth api-delay to 1s.
|
Reduce oauth api-delay to 1s.
|
Python
|
mit
|
amici-ursi/ImagesOfNetwork,scowcron/ImagesOfNetwork
|
---
+++
@@ -3,6 +3,10 @@
from images_of import settings
class Reddit(praw.Reddit):
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self.config.api_request_delay = 1.0
+
def oauth(self, **kwargs):
self.set_oauth_app_info(
client_id = kwargs.get('client_id') or settings.CLIENT_ID,
@@ -16,6 +20,7 @@
def login(self, username=None, password=None):
# this is depricated, just ignore the warning.
+ self.config.api_request_delay = 2.0
super().login(
username or settings.USERNAME,
password or settings.PASSWORD,
|
cab3289827c859085dff9d492362d6648b52d23f
|
karma.py
|
karma.py
|
from brutal.core.plugin import cmd, match
import collections
karmas = collections.Counter()
@match(regex=r'^([a-zA-Z0-9_]+)((:?\+)+)$')
def karma_inc(event, name, pluses, *args):
if name == event.meta['nick']:
return 'Not in this universe, maggot!'
else:
karmas[name] += len(pluses)//2
@match(regex=r'^([a-zA-Z0-9_]+)((:?\-)+)$')
def karma_dec(event, name, minuses, *args):
if name == event.meta['nick']:
return 'Not in this universe, maggot!'
else:
karmas[name] -= len(minuses)//2
@cmd
def karma(event):
"""Returns karma points for a given user."""
args = event.args
if len(args) < 1:
return "{0}'s karma level is: {1}".format(event.meta['nick'], karmas[event.meta['nick']])
user = event.args[0]
if user not in karmas:
karmas[user] = 0
return "{0}'s karma level is: {1}".format(user, karmas[user])
|
from brutal.core.plugin import cmd, match
import collections
karmas = collections.Counter()
@match(regex=r'^([a-zA-Z0-9_]+)((:?\+)+)$')
def karma_inc(event, name, pluses, *args):
if name == event.meta['nick']:
return 'Not in this universe, maggot!'
else:
karmas[name] += len(pluses)//2
@match(regex=r'^([a-zA-Z0-9_]+)((:?\-)+)$')
def karma_dec(event, name, minuses, *args):
if name == event.meta['nick']:
return 'Not in this universe, maggot!'
else:
karmas[name] -= len(minuses)//2
@cmd
def karma(event):
"""Returns karma points for a given user."""
args = event.args
if len(args) < 1:
return "{0}'s karma level is: {1}".format(event.meta['nick'],
karmas[event.meta['nick']])
user = event.args[0]
if user not in karmas:
karmas[user] = 0
return "{0}'s karma level is: {1}".format(user, karmas[user])
|
Split long line to make it more readable.
|
Karma: Split long line to make it more readable.
Signed-off-by: Jakub Novak <3db738bfafc513cdba5d3154e6b5319945461327@gmail.com>
|
Python
|
apache-2.0
|
mrshu/brutal-plugins,Adman/brutal-plugins
|
---
+++
@@ -26,7 +26,8 @@
"""Returns karma points for a given user."""
args = event.args
if len(args) < 1:
- return "{0}'s karma level is: {1}".format(event.meta['nick'], karmas[event.meta['nick']])
+ return "{0}'s karma level is: {1}".format(event.meta['nick'],
+ karmas[event.meta['nick']])
user = event.args[0]
|
51060b1def98a98bee0a401205116e2cac056299
|
test_core.py
|
test_core.py
|
#!/usr/bin/env python
from ookoobah import core
from ookoobah import utils
grid = utils.make_grid_from_string("""
# # # # # #
# > . . \ #
# . # . | #
# . / | o #
# . \ . / #
# # # # # #
""")
game = core.Game(grid=grid)
game.start()
print "hit <enter> to render next; ^C to abort"
status = core.Game.STATUS_ON
while status == core.Game.STATUS_ON:
print utils.dump_game_to_string(game)
status = game.step()
raw_input()
print "game status: %s" % status
|
#!/usr/bin/env python
from ookoobah import core
from ookoobah import session
from ookoobah import utils
grid = utils.make_grid_from_string("""
# # # # # #
# > . . \ #
# . # . | #
# . / | o #
# . \ . / #
# # # # # #
""")
sess = session.Session(grid=grid)
sess.start()
print "<enter> to render next; <r> to reset; ^C to abort"
status = core.Game.STATUS_ON
while status == core.Game.STATUS_ON:
print utils.dump_game_to_string(sess.game)
status = sess.game.step()
if raw_input() == "r":
sess.reset()
sess.start()
print "game status: %s" % status
|
Switch to Session from a bare Game
|
test: Switch to Session from a bare Game
|
Python
|
mit
|
vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah,vickenty/ookoobah
|
---
+++
@@ -1,6 +1,7 @@
#!/usr/bin/env python
from ookoobah import core
+from ookoobah import session
from ookoobah import utils
grid = utils.make_grid_from_string("""
@@ -12,15 +13,17 @@
# # # # # #
""")
-game = core.Game(grid=grid)
-game.start()
+sess = session.Session(grid=grid)
+sess.start()
-print "hit <enter> to render next; ^C to abort"
+print "<enter> to render next; <r> to reset; ^C to abort"
status = core.Game.STATUS_ON
while status == core.Game.STATUS_ON:
- print utils.dump_game_to_string(game)
- status = game.step()
- raw_input()
+ print utils.dump_game_to_string(sess.game)
+ status = sess.game.step()
+ if raw_input() == "r":
+ sess.reset()
+ sess.start()
print "game status: %s" % status
|
a4beb8053780a9feb86fd85f0ce649717b9e7919
|
lib/disco/sysutil.py
|
lib/disco/sysutil.py
|
import sys, resource
from ctypes import *
import ctypes.util
if sys.platform == "darwin":
def available_memory():
libc = cdll.LoadLibrary(ctypes.util.find_library("libc"))
mem = c_uint64(0)
size = c_size_t(sizeof(mem))
libc.sysctlbyname.argtypes = [
c_char_p, c_void_p, c_void_p, c_void_p, c_ulong
]
libc.sysctlbyname(
"hw.memsize",
c_voidp(addressof(mem)),
c_voidp(addressof(size)),
None,
0
)
return int(mem.value)
elif "linux" in sys.platform:
def available_memory():
libc = cdll.LoadLibrary(ctypes.util.find_library("libc"))
return libc.getpagesize() * libc.get_phys_pages()
else:
def available_memory():
return int(1024**4)
def set_mem_limit(limit):
bytes = 0
if limit.endswith('%'):
p = float(limit[:-1]) / 100.0
bytes = int(p * available_memory())
elif limit:
bytes = int(limit)
if bytes > 0:
soft, hard = resource.getrlimit(resource.RLIMIT_AS)
bmin = lambda x: min(bytes if x < 0 else x, bytes)
resource.setrlimit(resource.RLIMIT_AS, (bmin(soft), bmin(hard)))
|
import sys, resource
from ctypes import *
import ctypes.util
if sys.platform == "darwin":
def available_memory():
libc = cdll.LoadLibrary(ctypes.util.find_library("libc"))
mem = c_uint64(0)
size = c_size_t(sizeof(mem))
libc.sysctlbyname.argtypes = [
c_char_p, c_void_p, c_void_p, c_void_p, c_ulong
]
libc.sysctlbyname(
b"hw.memsize",
c_voidp(addressof(mem)),
c_voidp(addressof(size)),
None,
0
)
return int(mem.value)
elif "linux" in sys.platform:
def available_memory():
libc = cdll.LoadLibrary(ctypes.util.find_library("libc"))
return libc.getpagesize() * libc.get_phys_pages()
else:
def available_memory():
return int(1024**4)
def set_mem_limit(limit):
bytes = 0
if limit.endswith('%'):
p = float(limit[:-1]) / 100.0
bytes = int(p * available_memory())
elif limit:
bytes = int(limit)
if bytes > 0:
soft, hard = resource.getrlimit(resource.RLIMIT_AS)
bmin = lambda x: min(bytes if x < 0 else x, bytes)
resource.setrlimit(resource.RLIMIT_AS, (bmin(soft), bmin(hard)))
|
Make the passed arguments to sysctlbyname convertible to c_char_p.
|
Make the passed arguments to sysctlbyname convertible to c_char_p.
In python3, the default type of strings are unicode which cannot be
converted to c_char_p, resulting a type error from sysctlbyname,
using a b prefix for the string makes it convertible to c_char_p.
|
Python
|
bsd-3-clause
|
ErikDubbelboer/disco,beni55/disco,mwilliams3/disco,oldmantaiter/disco,ktkt2009/disco,ErikDubbelboer/disco,oldmantaiter/disco,pooya/disco,beni55/disco,pombredanne/disco,ktkt2009/disco,beni55/disco,simudream/disco,mozilla/disco,ktkt2009/disco,simudream/disco,mozilla/disco,discoproject/disco,ErikDubbelboer/disco,pooya/disco,pombredanne/disco,mwilliams3/disco,ktkt2009/disco,pooya/disco,ErikDubbelboer/disco,beni55/disco,beni55/disco,oldmantaiter/disco,discoproject/disco,ktkt2009/disco,discoproject/disco,ErikDubbelboer/disco,mozilla/disco,seabirdzh/disco,pombredanne/disco,seabirdzh/disco,pombredanne/disco,mozilla/disco,discoproject/disco,simudream/disco,seabirdzh/disco,pooya/disco,mwilliams3/disco,seabirdzh/disco,discoproject/disco,mwilliams3/disco,oldmantaiter/disco,simudream/disco,simudream/disco,mwilliams3/disco,seabirdzh/disco,pombredanne/disco,oldmantaiter/disco
|
---
+++
@@ -11,7 +11,7 @@
c_char_p, c_void_p, c_void_p, c_void_p, c_ulong
]
libc.sysctlbyname(
- "hw.memsize",
+ b"hw.memsize",
c_voidp(addressof(mem)),
c_voidp(addressof(size)),
None,
|
2c7464e8428359bec607623bffa3418e58ec8f1d
|
funbox/itertools_compat.py
|
funbox/itertools_compat.py
|
"""itertools compatibility for Python 2 and 3, for imap, izip and ifilter.
Just use:
from funbox.itertools_compat import imap, izip, ifilter
instead of:
from itertools import imap, izip, ifilter, ifilterfalse
>>> list(imap(int, ['1', '2', '3']))
[1, 2, 3]
>>> is_even = lambda x: (x % 2 == 0)
>>> list(ifilter(is_even, [1, 2, 3, 4]))
[2, 4]
>>> list(ifilterfalse(is_even, [1, 2, 3, 4]))
[1, 3]
>>> list(izip([1,2,3], [4,5,6]))
[(1, 4), (2, 5), (3, 6)]
"""
try:
from itertools import imap
except ImportError:
imap = map
try:
from itertools import ifilter
except ImportError:
ifilter = filter
try:
from itertools import izip
except ImportError:
izip = zip
try:
from itertools import ifilterfalse
except ImportError:
from itertools import filterfalse as ifilterfalse
if __name__ == "__main__":
import doctest
doctest.testmod()
|
"""itertools compatibility for Python 2 and 3, for imap, izip and ifilter.
Just use:
from funbox.itertools_compat import imap, izip, ifilter, ifilterfalse
instead of:
from itertools import imap, izip, ifilter, ifilterfalse
>>> list(imap(int, ['1', '2', '3']))
[1, 2, 3]
>>> is_even = lambda x: (x % 2 == 0)
>>> list(ifilter(is_even, [1, 2, 3, 4]))
[2, 4]
>>> list(ifilterfalse(is_even, [1, 2, 3, 4]))
[1, 3]
>>> list(izip([1,2,3], [4,5,6]))
[(1, 4), (2, 5), (3, 6)]
"""
try:
from itertools import imap
except ImportError:
imap = map
try:
from itertools import ifilter
except ImportError:
ifilter = filter
try:
from itertools import izip
except ImportError:
izip = zip
try:
from itertools import ifilterfalse
except ImportError:
from itertools import filterfalse as ifilterfalse
if __name__ == "__main__":
import doctest
doctest.testmod()
|
Fix small incompleteness in documentation.
|
Fix small incompleteness in documentation.
|
Python
|
mit
|
nmbooker/python-funbox,nmbooker/python-funbox
|
---
+++
@@ -2,7 +2,7 @@
"""itertools compatibility for Python 2 and 3, for imap, izip and ifilter.
Just use:
- from funbox.itertools_compat import imap, izip, ifilter
+ from funbox.itertools_compat import imap, izip, ifilter, ifilterfalse
instead of:
from itertools import imap, izip, ifilter, ifilterfalse
|
f7b8807bef552490227592827587f6d896a25a11
|
pulseguardian/mozdef.py
|
pulseguardian/mozdef.py
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
import json
import os
import sys
import pulseguardian.config
# Severities
DEBUG = 'DEBUG'
INFO = 'INFO'
NOTICE = 'NOTICE'
WARNING = 'WARNING'
ERROR = 'ERROR'
CRITICAL = 'CRITICAL'
ALERT = 'ALERT'
EMERGENCY = 'EMERGENCY'
# Categories
ACCOUNT_CREATION = 'Account Creation'
ACCOUNT_DELETION = 'Account Deletion'
ACCOUNT_UNLOCK = 'Account Unlock'
ACCOUNT_UPDATE = 'Account Update'
AUTHENTICATION = 'Authentication'
AUTHORIZATION = 'Authorization'
OTHER = 'Other'
SHUTDOWN = 'Shutdown'
STARTUP = 'Startup'
def log(sev, cat, summary, details=None, tags=None):
print '[{}] {}'.format(cat, summary)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import datetime
import json
import os
import sys
import pulseguardian.config
# Severities
DEBUG = 'DEBUG'
INFO = 'INFO'
NOTICE = 'NOTICE'
WARNING = 'WARNING'
ERROR = 'ERROR'
CRITICAL = 'CRITICAL'
ALERT = 'ALERT'
EMERGENCY = 'EMERGENCY'
# Categories
ACCOUNT_CREATION = 'Account Creation'
ACCOUNT_DELETION = 'Account Deletion'
ACCOUNT_UNLOCK = 'Account Unlock'
ACCOUNT_UPDATE = 'Account Update'
AUTHENTICATION = 'Authentication'
AUTHORIZATION = 'Authorization'
OTHER = 'Other'
SHUTDOWN = 'Shutdown'
STARTUP = 'Startup'
def log(sev, cat, summary, details=None, tags=None):
print '[{}] {} {} {}'.format(cat, summary, json.dumps(details), json.dumps(tags))
|
Add more details to logging
|
Add more details to logging
|
Python
|
mpl-2.0
|
mozilla/pulseguardian,mozilla/pulseguardian,mozilla/pulseguardian,mozilla/pulseguardian
|
---
+++
@@ -32,5 +32,5 @@
def log(sev, cat, summary, details=None, tags=None):
- print '[{}] {}'.format(cat, summary)
+ print '[{}] {} {} {}'.format(cat, summary, json.dumps(details), json.dumps(tags))
|
da4c39696a71077b34d4ab9347f7d7b4c5ef1601
|
scripts/create_test_data_file_from_bt.py
|
scripts/create_test_data_file_from_bt.py
|
import serial
import time
import platform
import csv
import zephyr.protocol
def main():
serial_port_dict = {"Darwin": "/dev/cu.BHBHT001931-iSerialPort1",
"Windows": 23}
serial_port = serial_port_dict[platform.system()]
ser = serial.Serial(serial_port)
callback = lambda x: None
protocol = zephyr.protocol.BioHarnessProtocol(ser, callback, "../test_data/120-second-bt-stream")
protocol.enable_periodic_packets()
start_time = time.time()
while time.time() < start_time + 120:
protocol.read_and_handle_bytes(1)
if __name__ == "__main__":
main()
|
import serial
import time
import platform
import csv
import threading
import zephyr.protocol
import zephyr.message
def callback(x):
print x
def reading_thread(protocol):
start_time = time.time()
while time.time() < start_time + 120:
protocol.read_and_handle_bytes(1)
def create_data_files(input_definitions):
threads = []
try:
for serial_i, (serial_port, enable_channels) in enumerate(input_definitions):
payload_parser = zephyr.message.MessagePayloadParser(callback)
ser = serial.Serial(serial_port)
protocol = zephyr.protocol.BioHarnessProtocol(ser, payload_parser.handle_message, "../test_data/120-second-bt-stream-%d" % serial_i)
if enable_channels:
protocol.enable_periodic_packets()
thread = threading.Thread(target=reading_thread, args=(protocol,))
threads.append(thread)
thread.start()
finally:
for thread in threads:
thread.join()
def main():
create_data_files([(29, False), (30, True)])
if __name__ == "__main__":
main()
|
Refactor to support multiple devices for test data generation
|
Refactor to support multiple devices for test data generation
|
Python
|
bsd-2-clause
|
jpaalasm/zephyr-bt
|
---
+++
@@ -3,24 +3,48 @@
import time
import platform
import csv
+import threading
import zephyr.protocol
+import zephyr.message
-def main():
- serial_port_dict = {"Darwin": "/dev/cu.BHBHT001931-iSerialPort1",
- "Windows": 23}
+
+def callback(x):
+ print x
+
+
+def reading_thread(protocol):
+ start_time = time.time()
- serial_port = serial_port_dict[platform.system()]
- ser = serial.Serial(serial_port)
-
- callback = lambda x: None
- protocol = zephyr.protocol.BioHarnessProtocol(ser, callback, "../test_data/120-second-bt-stream")
- protocol.enable_periodic_packets()
-
- start_time = time.time()
while time.time() < start_time + 120:
protocol.read_and_handle_bytes(1)
+def create_data_files(input_definitions):
+ threads = []
+
+ try:
+ for serial_i, (serial_port, enable_channels) in enumerate(input_definitions):
+ payload_parser = zephyr.message.MessagePayloadParser(callback)
+
+ ser = serial.Serial(serial_port)
+ protocol = zephyr.protocol.BioHarnessProtocol(ser, payload_parser.handle_message, "../test_data/120-second-bt-stream-%d" % serial_i)
+
+ if enable_channels:
+ protocol.enable_periodic_packets()
+
+ thread = threading.Thread(target=reading_thread, args=(protocol,))
+ threads.append(thread)
+ thread.start()
+
+ finally:
+ for thread in threads:
+ thread.join()
+
+
+def main():
+ create_data_files([(29, False), (30, True)])
+
+
if __name__ == "__main__":
main()
|
a49cc6d6ca1ce22358292c00d847cb424306b229
|
wordsaladflask.py
|
wordsaladflask.py
|
import wordsalad
from flask import Flask
App = Flask(__name__)
@App.route("salad/<int:n>/<string:corpus>")
def _get(self, n, corpus="default"):
"""Generate n word salads from the given (optional) corpus."""
pass
@App.route("salad/corpuses")
def _get_corpuses(self):
"""Fetch a list of "corpus:es" we can use as a source text.
Returns the list as a JSON-list of strings."""
pass
|
import wordsalad
from flask import Flask
App = Flask(__name__)
@App.route("salad/<int:n>/<string:corpus>")
def _get(self, n, corpus="default"):
"""Generate n word salads from the given (optional) corpus."""
pass
@App.route("salad/corpuses")
def _get_corpora(self):
"""Fetch a list of "corpora" we can use as a source text.
Returns the list as a JSON-list of strings."""
pass
def main():
app.run()
if __name__ == '__main__':
main()
|
Use the proper words ;)
|
Use the proper words ;)
|
Python
|
mit
|
skurmedel/wordsalad
|
---
+++
@@ -9,8 +9,14 @@
pass
@App.route("salad/corpuses")
-def _get_corpuses(self):
- """Fetch a list of "corpus:es" we can use as a source text.
+def _get_corpora(self):
+ """Fetch a list of "corpora" we can use as a source text.
Returns the list as a JSON-list of strings."""
pass
+
+def main():
+ app.run()
+
+if __name__ == '__main__':
+ main()
|
fb20bd41b5373c994274aa8565ba579fa13c8c28
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.9',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.10',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
Update the PyPI version to 0.2.10
|
Update the PyPI version to 0.2.10
|
Python
|
mit
|
Doist/todoist-python,electronick1/todoist-python
|
---
+++
@@ -10,7 +10,7 @@
setup(
name='todoist-python',
- version='0.2.9',
+ version='0.2.10',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
|
4bd13f0385dbac9855f6117afea0911bce1af3b3
|
setup.py
|
setup.py
|
#! /usr/bin/env python
#-*- coding=utf-8 -*-
from setuptools import setup
setup(name='kitabu',
author=u'Adrian Duraj & Marek Brzóska',
author_email='adrian.duraj@gmail.com, brzoskamarek@gmail.com',
description="Django library to build reservation application",
license='MIT',
version='dev',
url='https://github.com/mbad/kitabu',
packages=['kitabu'],
#include_package_data=True,
install_requires=[
'Django==1.5c1',
'South>=0.7.6',
],
dependency_links=[
'http://github.com/mbad/kitabu/tarball/master#egg=kitabu-dev'
],
)
|
#! /usr/bin/env python
#-*- coding=utf-8 -*-
from setuptools import setup
setup(name='kitabu',
author=u'Adrian Duraj & Marek Brzóska',
author_email='adrian.duraj@gmail.com, brzoskamarek@gmail.com',
description="Django library to build reservation application",
license='MIT',
version='dev',
url='https://github.com/mbad/kitabu',
packages=['kitabu'],
#include_package_data=True,
install_requires=[
'Django>=1.5c1',
'South>=0.7.6',
],
dependency_links=[
'http://github.com/django/django/tarball/1.5c1#egg=Django-1.5c1',
],
)
|
Fix dependency link for Django 1.5rc1
|
Fix dependency link for Django 1.5rc1
|
Python
|
mit
|
mbad/kitabu,mbad/kitabu,mbad/kitabu
|
---
+++
@@ -14,10 +14,10 @@
packages=['kitabu'],
#include_package_data=True,
install_requires=[
- 'Django==1.5c1',
+ 'Django>=1.5c1',
'South>=0.7.6',
],
dependency_links=[
- 'http://github.com/mbad/kitabu/tarball/master#egg=kitabu-dev'
+ 'http://github.com/django/django/tarball/1.5c1#egg=Django-1.5c1',
],
)
|
a333bb06a913ca87aac77775e22fdb00c320cbb6
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import pathlib
from setuptools import setup
setup(
name="ipytest",
version="0.8.0",
description="Unit tests in IPython notebooks.",
long_description=pathlib.Path("Readme.md").read_text(),
long_description_content_type="text/markdown",
author="Christopher Prohm",
author_email="mail@cprohm.de",
url="https://github.com/chmp/ipytest",
license="MIT",
packages=["ipytest"],
install_requires=["packaging"],
tests_require=["pytest"],
python_requires=">=3",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Software Development :: Testing",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
],
)
|
#!/usr/bin/env python
import pathlib
from setuptools import setup
setup(
name="ipytest",
version="0.8.0",
description="Unit tests in IPython notebooks.",
long_description=pathlib.Path("Readme.md").read_text(),
long_description_content_type="text/markdown",
author="Christopher Prohm",
author_email="mail@cprohm.de",
license="MIT",
packages=["ipytest"],
install_requires=["packaging"],
tests_require=["pytest"],
python_requires=">=3",
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Topic :: Software Development :: Testing",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python :: 3",
],
)
|
Revert "Add URL to project"
|
Revert "Add URL to project"
|
Python
|
mit
|
chmp/ipytest
|
---
+++
@@ -11,7 +11,6 @@
long_description_content_type="text/markdown",
author="Christopher Prohm",
author_email="mail@cprohm.de",
- url="https://github.com/chmp/ipytest",
license="MIT",
packages=["ipytest"],
install_requires=["packaging"],
|
39ab8f61731a383ea1befd18df5483a25811c0ca
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='rgkit',
version='0.2',
description='Robot Game Testing Kit',
maintainer='Peter Wen',
maintainer_email='peter@whitehalmos.org',
url='https://github.com/WhiteHalmos/rgkit',
packages=['rgkit'],
package_data={'rgkit': ['maps/*.py']},
license='Unlicense',
entry_points={
'console_scripts': [
'rgrun = rgkit.run:main',
'rgmap = rgkit.mapeditor:main'
]
},
)
|
from setuptools import setup
setup(
name='rgkit',
version='0.2.1',
description='Robot Game Testing Kit',
maintainer='Peter Wen',
maintainer_email='peter@whitehalmos.org',
url='https://github.com/WhiteHalmos/rgkit',
packages=['rgkit'],
package_data={'rgkit': ['maps/*.py']},
license='Unlicense',
entry_points={
'console_scripts': [
'rgrun = rgkit.run:main',
'rgmap = rgkit.mapeditor:main'
]
},
)
|
Increment version number. Forgot to commit before.
|
Increment version number. Forgot to commit before.
|
Python
|
unlicense
|
RobotGame/rgkit,RobotGame/rgkit,mpeterv/rgkit,mpeterv/rgkit
|
---
+++
@@ -2,7 +2,7 @@
setup(
name='rgkit',
- version='0.2',
+ version='0.2.1',
description='Robot Game Testing Kit',
maintainer='Peter Wen',
maintainer_email='peter@whitehalmos.org',
|
deca54d2b3481647abaa19d30dfc8f8e9a8b719e
|
setup.py
|
setup.py
|
from setuptools import setup
install_requires = ['jsonref', 'schema', 'openpyxl>=2.6', 'pytz',
'xmltodict', 'lxml', 'odfpy']
setup(
name='flattentool',
version='0.10.0',
author='Open Data Services',
author_email='code@opendataservices.coop',
packages=['flattentool'],
scripts=['flatten-tool'],
url='https://github.com/OpenDataServices/flatten-tool',
license='MIT',
description='Tools for generating CSV and other flat versions of the structured data',
install_requires=install_requires,
extras_require = {
'HTTP': ['requests']
}
)
|
from setuptools import setup
install_requires = ['jsonref', 'schema', 'openpyxl>=2.6,!=3.0.2', 'pytz',
'xmltodict', 'lxml', 'odfpy']
setup(
name='flattentool',
version='0.10.0',
author='Open Data Services',
author_email='code@opendataservices.coop',
packages=['flattentool'],
scripts=['flatten-tool'],
url='https://github.com/OpenDataServices/flatten-tool',
license='MIT',
description='Tools for generating CSV and other flat versions of the structured data',
install_requires=install_requires,
extras_require = {
'HTTP': ['requests']
}
)
|
Update openpyxl version restriction, because 3.0.3 is fine
|
Update openpyxl version restriction, because 3.0.3 is fine
|
Python
|
mit
|
OpenDataServices/flatten-tool
|
---
+++
@@ -1,7 +1,8 @@
from setuptools import setup
-install_requires = ['jsonref', 'schema', 'openpyxl>=2.6', 'pytz',
+install_requires = ['jsonref', 'schema', 'openpyxl>=2.6,!=3.0.2', 'pytz',
'xmltodict', 'lxml', 'odfpy']
+
setup(
name='flattentool',
version='0.10.0',
|
220d1e99988f29e69295c70ef8428fa2cb3aa6f6
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='aacrgenie',
version='1.6.2',
description='Processing and validation for GENIE',
url='https://github.com/Sage-Bionetworks/Genie',
author='Thomas Yu',
author_email='thomasyu888@gmail.com',
license='MIT',
packages=find_packages(),
zip_safe=False,
data_files=[('genie',['genie/addFeatureType.sh','genie/createGTF.sh'])],
entry_points = {
'console_scripts': ['genie = genie.__main__:main']},
install_requires=[
'pandas>=0.20.0',
'synapseclient',
'httplib2',
'pycrypto'])
|
from setuptools import setup, find_packages
setup(name='aacrgenie',
version='1.6.2',
description='Processing and validation for GENIE',
url='https://github.com/Sage-Bionetworks/Genie',
author='Thomas Yu',
author_email='thomasyu888@gmail.com',
license='MIT',
packages=find_packages(),
zip_safe=False,
data_files=[('genie',['genie/addFeatureType.sh','genie/createGTF.sh'])],
entry_points = {
'console_scripts': ['genie = genie.__main__:main']},
install_requires=[
'pandas>=0.20.0',
'synapseclient>=1.9',
'httplib2>=0.11.3',
'pycrypto>=2.6.1',
'yaml>=3.11'])
|
Add yaml to installation requirements
|
Add yaml to installation requirements
|
Python
|
mit
|
thomasyu888/Genie,thomasyu888/Genie,thomasyu888/Genie,thomasyu888/Genie
|
---
+++
@@ -14,6 +14,7 @@
'console_scripts': ['genie = genie.__main__:main']},
install_requires=[
'pandas>=0.20.0',
- 'synapseclient',
- 'httplib2',
- 'pycrypto'])
+ 'synapseclient>=1.9',
+ 'httplib2>=0.11.3',
+ 'pycrypto>=2.6.1',
+ 'yaml>=3.11'])
|
4b253f3620c3bc108982fb3f362dbe81e3e7ab3d
|
setup.py
|
setup.py
|
from setuptools import setup
import tamarin
DESCRIPTION = "A Django app for monitoring AWS usage in Django's admin."
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
version_str = '%d.%d' % (tamarin.VERSION[0], tamarin.VERSION[1])
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='tamarin',
version=version_str,
packages=[
'tamarin',
'tamarin.management', 'tamarin.management.commands',
'tamarin.migrations',
],
author='Gregory Taylor',
author_email='gtaylor@duointeractive.com',
url='https://github.com/duointeractive/tamarin/',
license='MIT',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['boto', 'pyparsing'],
)
|
from setuptools import setup
import tamarin
DESCRIPTION = "A Django app for monitoring AWS usage in Django's admin."
LONG_DESCRIPTION = None
try:
LONG_DESCRIPTION = open('README.rst').read()
except:
pass
version_str = '%d.%d' % (tamarin.VERSION[0], tamarin.VERSION[1])
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Framework :: Django',
]
setup(
name='tamarin',
version=version_str,
packages=[
'tamarin',
'tamarin.management', 'tamarin.management.commands',
'tamarin.migrations',
],
author='Gregory Taylor',
author_email='gtaylor@duointeractive.com',
url='https://github.com/duointeractive/tamarin/',
license='BSD',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
classifiers=CLASSIFIERS,
install_requires=['boto', 'pyparsing'],
)
|
Correct the trove categorization to say License = BSD.
|
Correct the trove categorization to say License = BSD.
|
Python
|
bsd-3-clause
|
duointeractive/tamarin,duointeractive/tamarin,duointeractive/tamarin
|
---
+++
@@ -14,7 +14,7 @@
CLASSIFIERS = [
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
- 'License :: OSI Approved :: MIT License',
+ 'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
@@ -32,7 +32,7 @@
author='Gregory Taylor',
author_email='gtaylor@duointeractive.com',
url='https://github.com/duointeractive/tamarin/',
- license='MIT',
+ license='BSD',
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
platforms=['any'],
|
02b4f6a9513aa5213de0573b6a514a7221e7d625
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import re
from setuptools import setup
setup_dir = os.path.dirname(__file__)
readme_contents = open(os.path.join(setup_dir, 'README.rst')).read()
faucet_version = re.match(r'.+version: ([0-9\.]+)', readme_contents).group(1)
os.environ["PBR_VERSION"] = faucet_version
setup(
name='faucet',
setup_requires=['pbr>=1.9', 'setuptools>=17.1'],
pbr=True
)
|
#!/usr/bin/env python
import io
import os
import re
from setuptools import setup
setup_dir = os.path.dirname(__file__)
readme_contents = io.open(os.path.join(setup_dir, 'README.rst'), encoding="utf-8").read()
faucet_version = re.match(r'.+version: ([0-9\.]+)', readme_contents).group(1)
os.environ["PBR_VERSION"] = faucet_version
setup(
name='faucet',
setup_requires=['pbr>=1.9', 'setuptools>=17.1'],
pbr=True
)
|
Use py2 and py3 compatible io.read() instead of read().
|
Use py2 and py3 compatible io.read() instead of read().
|
Python
|
apache-2.0
|
trentindav/faucet,wackerly/faucet,trentindav/faucet,gizmoguy/faucet,Bairdo/faucet,REANNZ/faucet,Bairdo/faucet,byllyfish/faucet,byllyfish/faucet,gizmoguy/faucet,shivarammysore/faucet,trungdtbk/faucet,mwutzke/faucet,shivarammysore/faucet,anarkiwi/faucet,REANNZ/faucet,faucetsdn/faucet,trungdtbk/faucet,anarkiwi/faucet,mwutzke/faucet,wackerly/faucet,faucetsdn/faucet
|
---
+++
@@ -1,12 +1,13 @@
#!/usr/bin/env python
+import io
import os
import re
from setuptools import setup
setup_dir = os.path.dirname(__file__)
-readme_contents = open(os.path.join(setup_dir, 'README.rst')).read()
+readme_contents = io.open(os.path.join(setup_dir, 'README.rst'), encoding="utf-8").read()
faucet_version = re.match(r'.+version: ([0-9\.]+)', readme_contents).group(1)
os.environ["PBR_VERSION"] = faucet_version
|
fa4be57f00827ea452e0d7bc1c0b5b17f20a6d2d
|
test.py
|
test.py
|
import nltk
import xml.dom.minidom as dom
import codecs
file = open("tweets.xml")
tree = dom.parse(file)
i = 0
e = 0
for tweet in tree.firstChild.childNodes:
try:
textNodes = tweet.getElementsByTagName("text")
x = tree.createElement("foo")
for textNode in textNodes:
textValue = textNode.firstChild.nodeValue
tokenized = nltk.word_tokenize(textValue)
tokenizedNode = tree.createTextNode(' '.join(word for word in tokenized))
pos = nltk.pos_tag(tokenized)
print tokenizedNode.nodeValue
x.appendChild(tokenizedNode)
tweet.appendChild(x)
#print textNode.firstChild.nodeValue
i += 1
print i
except:
e += 1
newFile = codecs.open("tweets_modified.xml", "w", encoding='utf-8')
tree.writexml(newFile)
file.close()
print "Program finished, found " + str(i) + " entries"
print str(e) + " tweets failed"
|
import nltk
import xml.dom.minidom as dom
import codecs
import nltk.data
sent_detector = nltk.data.load('tokenizers/punkt/english.pickle')
file = open("tweets.xml")
tree = dom.parse(file)
i = 0
e = 0
for tweet in tree.firstChild.childNodes:
try:
textNodes = tweet.getElementsByTagName("text")
posTagsNode = tree.createElement("posTags")
for textNode in textNodes:
text = textNode.firstChild.nodeValue
#print('\n-----\n'.join(sent_detector.tokenize(text.strip())))
textSentence = sent_detector.tokenize(text.strip())
textId = 0
for sentence in textSentence:
sentenceNode = tree.createElement("sentence")
sentenceNode.setAttribute("id", str(textId))
tokenized = nltk.word_tokenize(sentence)
pos = nltk.pos_tag(tokenized)
posId = 0
for posTuple in pos:
posNode = tree.createElement("word")
posNode.setAttribute("pos", posTuple[1])
posNode.setAttribute("id", str(posId))
posValueNode = tree.createTextNode(posTuple[0])
posNode.appendChild(posValueNode)
sentenceNode.appendChild(posNode)
posId += 1
posTagsNode.appendChild(sentenceNode)
textId += 1
tweet.appendChild(posTagsNode)
i += 1
print i
except:
e += 1
newFile = codecs.open("tweets_modified.xml", "w", encoding='utf-8')
tree.writexml(newFile)
file.close()
print "Program finished, found " + str(i) + " entries"
print str(e) + " tweets failed"
|
Test implementation of xml.minidom / nltk
|
Test implementation of xml.minidom / nltk
|
Python
|
apache-2.0
|
markusmichel/Tworpus-Client,markusmichel/Tworpus-Client,markusmichel/Tworpus-Client
|
---
+++
@@ -1,6 +1,13 @@
import nltk
import xml.dom.minidom as dom
import codecs
+
+
+
+import nltk.data
+sent_detector = nltk.data.load('tokenizers/punkt/english.pickle')
+
+
file = open("tweets.xml")
tree = dom.parse(file)
@@ -11,20 +18,41 @@
try:
textNodes = tweet.getElementsByTagName("text")
-
- x = tree.createElement("foo")
+ posTagsNode = tree.createElement("posTags")
for textNode in textNodes:
- textValue = textNode.firstChild.nodeValue
- tokenized = nltk.word_tokenize(textValue)
- tokenizedNode = tree.createTextNode(' '.join(word for word in tokenized))
+ text = textNode.firstChild.nodeValue
- pos = nltk.pos_tag(tokenized)
- print tokenizedNode.nodeValue
- x.appendChild(tokenizedNode)
- tweet.appendChild(x)
+ #print('\n-----\n'.join(sent_detector.tokenize(text.strip())))
- #print textNode.firstChild.nodeValue
+ textSentence = sent_detector.tokenize(text.strip())
+
+ textId = 0
+
+ for sentence in textSentence:
+
+ sentenceNode = tree.createElement("sentence")
+ sentenceNode.setAttribute("id", str(textId))
+
+ tokenized = nltk.word_tokenize(sentence)
+
+ pos = nltk.pos_tag(tokenized)
+
+ posId = 0
+ for posTuple in pos:
+ posNode = tree.createElement("word")
+ posNode.setAttribute("pos", posTuple[1])
+ posNode.setAttribute("id", str(posId))
+ posValueNode = tree.createTextNode(posTuple[0])
+ posNode.appendChild(posValueNode)
+ sentenceNode.appendChild(posNode)
+ posId += 1
+
+ posTagsNode.appendChild(sentenceNode)
+ textId += 1
+
+ tweet.appendChild(posTagsNode)
+
i += 1
print i
|
7205324a6081a73d2b332afd42e23cd0447e6617
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name='django-easymoney',
version='0.5',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='An easy MoneyField for Django.',
long_description=open('README.rst').read(),
url='http://github.com/Suor/django-easymoney',
license='BSD',
py_modules=['easymoney'],
install_requires=[
'django>=1.6',
'babel',
'six',
],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
from setuptools import setup
setup(
name='django-easymoney',
version='0.5',
author='Alexander Schepanovski',
author_email='suor.web@gmail.com',
description='An easy MoneyField for Django.',
long_description=open('README.rst').read(),
url='http://github.com/Suor/django-easymoney',
license='BSD',
py_modules=['easymoney'],
install_requires=[
'django>=1.6',
'babel',
'six',
],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'Framework :: Django :: 1.6',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
Add Django version trove classifiers
|
Add Django version trove classifiers
|
Python
|
bsd-2-clause
|
Suor/django-easymoney
|
---
+++
@@ -25,6 +25,9 @@
'Programming Language :: Python',
'Framework :: Django',
+ 'Framework :: Django :: 1.6',
+ 'Framework :: Django :: 1.7',
+ 'Framework :: Django :: 1.8',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python :: 2',
|
0276d0e5ccf8d63adb7dd4438d67c0ff2c5bc3ae
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
requires = [
'caliopen.api.base',
]
setup(name='caliopen.api.user',
namespace_packages=['caliopen', 'caliopen.api'],
version='0.0.1',
description='Caliopen REST API for user and contact management.',
long_description=README + '\n\n' + CHANGES,
classifiers=["Programming Language :: Python", ],
author='Caliopen contributors',
author_email='contact@caliopen.org',
url='https://caliopen.org',
license='AGPLv3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=requires,
)
|
import os
from setuptools import setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
README = open(os.path.join(here, 'README.rst')).read()
CHANGES = open(os.path.join(here, 'CHANGES.rst')).read()
requires = [
'caliopen.api.base',
]
extras_require = {
'dev': [],
'test': [],
}
setup(name='caliopen.api.user',
namespace_packages=['caliopen', 'caliopen.api'],
version='0.0.1',
description='Caliopen REST API for user and contact management.',
long_description=README + '\n\n' + CHANGES,
classifiers=["Programming Language :: Python", ],
author='Caliopen contributors',
author_email='contact@caliopen.org',
url='https://caliopen.org',
license='AGPLv3',
packages=find_packages(),
include_package_data=True,
zip_safe=False,
extras_require=extras_require,
install_requires=requires,
tests_require=requires,
)
|
Add missing extras package declaration
|
Add missing extras package declaration
|
Python
|
agpl-3.0
|
ziir/caliopen.api.user
|
---
+++
@@ -9,6 +9,11 @@
requires = [
'caliopen.api.base',
]
+
+extras_require = {
+ 'dev': [],
+ 'test': [],
+}
setup(name='caliopen.api.user',
@@ -24,6 +29,7 @@
packages=find_packages(),
include_package_data=True,
zip_safe=False,
+ extras_require=extras_require,
install_requires=requires,
tests_require=requires,
)
|
6e426e4ae0dd3841ea7d92b7434c858cf39e9ef4
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os, sys, glob
from setuptools import setup, find_packages
setup(
name='aegea',
version='0.6.0',
url='https://github.com/kislyuk/aegea',
license=open('LICENSE.md').readline().strip(),
author='Andrey Kislyuk',
author_email='kislyuk@gmail.com',
description='Amazon Web Services Operator Interface',
long_description=open('README.rst').read(),
install_requires=[
'setuptools',
'boto3 >= 1.3.0',
'argcomplete >= 1.1.0',
'paramiko >= 1.16.0',
'requests >= 2.9.1',
'tweak >= 0.3.2',
'keymaker >= 0.1.7',
'pyyaml >= 3.11',
'python-dateutil >= 2.1'
],
extras_require={
':python_version == "2.7"': [
'enum34 >= 1.0.4',
'ipaddress >= 1.0.16',
'backports.statistics >= 0.1.0'
]
},
packages=find_packages(exclude=['test']),
scripts=glob.glob('scripts/*'),
platforms=['MacOS X', 'Posix'],
test_suite='test',
include_package_data=True
)
|
#!/usr/bin/env python
import os, sys, glob, subprocess
from setuptools import setup, find_packages
try:
version = subprocess.check_output(["git", "describe", "--tags", "--match", "v*.*.*"]).strip("v\n")
except:
version = "0.0.0"
setup(
name='aegea',
version=version,
url='https://github.com/kislyuk/aegea',
license=open('LICENSE.md').readline().strip(),
author='Andrey Kislyuk',
author_email='kislyuk@gmail.com',
description='Amazon Web Services Operator Interface',
long_description=open('README.rst').read(),
install_requires=[
'setuptools',
'boto3 >= 1.3.0',
'argcomplete >= 1.1.0',
'paramiko >= 1.16.0',
'requests >= 2.9.1',
'tweak >= 0.3.2',
'keymaker >= 0.1.7',
'pyyaml >= 3.11',
'python-dateutil >= 2.1'
],
extras_require={
':python_version == "2.7"': [
'enum34 >= 1.0.4',
'ipaddress >= 1.0.16',
'backports.statistics >= 0.1.0'
]
},
packages=find_packages(exclude=['test']),
scripts=glob.glob('scripts/*'),
platforms=['MacOS X', 'Posix'],
test_suite='test',
include_package_data=True
)
|
Use git describe output for version
|
Use git describe output for version
|
Python
|
apache-2.0
|
kislyuk/aegea,wholebiome/aegea,wholebiome/aegea,kislyuk/aegea,wholebiome/aegea,kislyuk/aegea
|
---
+++
@@ -1,11 +1,16 @@
#!/usr/bin/env python
-import os, sys, glob
+import os, sys, glob, subprocess
from setuptools import setup, find_packages
+
+try:
+ version = subprocess.check_output(["git", "describe", "--tags", "--match", "v*.*.*"]).strip("v\n")
+except:
+ version = "0.0.0"
setup(
name='aegea',
- version='0.6.0',
+ version=version,
url='https://github.com/kislyuk/aegea',
license=open('LICENSE.md').readline().strip(),
author='Andrey Kislyuk',
|
70929aa10fb59ed25c8fc4e76ce60bd6d2934c3f
|
rcamp/rcamp/settings/auth.py
|
rcamp/rcamp/settings/auth.py
|
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.pam_backend.PamBackend',
)
AUTH_USER_MODEL = 'accounts.User'
LOGIN_URL = '/login'
PAM_SERVICES = {
'default': 'login',
'csu': 'csu'
}
|
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'lib.pam_backend.PamBackend',
)
AUTH_USER_MODEL = 'accounts.User'
LOGIN_URL = '/login'
PAM_SERVICES = {
'default': 'curc-twofactor-duo',
'csu': 'csu'
}
|
Change the default pam login service
|
Change the default pam login service
|
Python
|
mit
|
ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP,ResearchComputing/RCAMP
|
---
+++
@@ -8,6 +8,6 @@
LOGIN_URL = '/login'
PAM_SERVICES = {
- 'default': 'login',
+ 'default': 'curc-twofactor-duo',
'csu': 'csu'
}
|
db3cadcf3baa22efe65495aca2efe5352d5a89a5
|
nhs/gunicorn_conf.py
|
nhs/gunicorn_conf.py
|
bind = "127.0.0.1:4567"
logfile = "/usr/local/ohc/log/op.gunicorn.log"
workers = 3
|
bind = "127.0.0.1:4567"
logfile = "/usr/local/ohc/log/op.gunicorn.log"
workers = 3
timeout = 60
|
Extend Gunicorn worker timeout for long-running API calls.
|
Extend Gunicorn worker timeout for long-running API calls.
|
Python
|
agpl-3.0
|
openhealthcare/open-prescribing,openhealthcare/open-prescribing,openhealthcare/open-prescribing
|
---
+++
@@ -1,3 +1,4 @@
bind = "127.0.0.1:4567"
logfile = "/usr/local/ohc/log/op.gunicorn.log"
workers = 3
+timeout = 60
|
2a6313e2ed7cfbd81e6779e6f014500d801ccc8c
|
xword/__init__.py
|
xword/__init__.py
|
__version__ = '2.0.0~rc1'
__license__ = '''
Copyright (c) 2009-2011, Cameron Dale <camrdale@gmail.com>
Copyright (c) 2005-2009, Bill McCloskey <bill.mccloskey@gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. The names of the contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
|
__version__ = '2.0.0~rc2'
__license__ = '''
Copyright (c) 2009-2011, Cameron Dale <camrdale@gmail.com>
Copyright (c) 2005-2009, Bill McCloskey <bill.mccloskey@gmail.com>
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. The names of the contributors may not be used to endorse or promote
products derived from this software without specific prior written
permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
'''
|
Update the version for 2.0.0~rc2 release.
|
Update the version for 2.0.0~rc2 release.
|
Python
|
bsd-3-clause
|
dnrce/xword
|
---
+++
@@ -1,4 +1,4 @@
-__version__ = '2.0.0~rc1'
+__version__ = '2.0.0~rc2'
__license__ = '''
Copyright (c) 2009-2011, Cameron Dale <camrdale@gmail.com>
|
078050de92362115ffa32f03478b6658bb4da63f
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='googleanalytics',
description='A wrapper for the Google Analytics API.',
long_description=open('README.rst').read(),
author='Stijn Debrouwere',
author_email='stijn@debrouwere.org',
url='https://github.com/debrouwere/google-analytics/',
download_url='http://www.github.com/debrouwere/google-analytics/tarball/master',
version='0.16.2',
license='ISC',
packages=find_packages(),
keywords='data analytics api wrapper google',
scripts=[
'bin/googleanalytics'
],
install_requires=[
'oauth2client>=1.4.6',
'google-api-python-client==1.4',
'python-dateutil',
'addressable>=1.4.2',
'inspect-it>=0.3.2',
'werkzeug==0.10',
'keyring==5.3',
'click==3.3',
'pyyaml>=3',
],
test_suite='googleanalytics.tests',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering :: Information Analysis',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
],
)
|
from setuptools import setup, find_packages
setup(name='googleanalytics',
description='A wrapper for the Google Analytics API.',
long_description=open('README.rst').read(),
author='Stijn Debrouwere',
author_email='stijn@debrouwere.org',
url='https://github.com/debrouwere/google-analytics/',
download_url='http://www.github.com/debrouwere/google-analytics/tarball/master',
version='0.16.2',
license='ISC',
packages=find_packages(),
keywords='data analytics api wrapper google',
scripts=[
'bin/googleanalytics'
],
install_requires=[
'oauth2client>=1.4.6',
'google-api-python-client==1.4',
'python-dateutil',
'addressable>=1.4.2',
'inspect-it>=0.3.2',
'werkzeug>=0.10',
'keyring==5.3',
'click==3.3',
'pyyaml>=3',
],
test_suite='googleanalytics.tests',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Scientific/Engineering :: Information Analysis',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
],
)
|
Allow for higher versions of werkzeug
|
Allow for higher versions of werkzeug
Install fails when a version of `werkzeug` greater than `0.10` is already present in the environment (current version is `0.10.4`)
|
Python
|
isc
|
debrouwere/google-analytics
|
---
+++
@@ -20,7 +20,7 @@
'python-dateutil',
'addressable>=1.4.2',
'inspect-it>=0.3.2',
- 'werkzeug==0.10',
+ 'werkzeug>=0.10',
'keyring==5.3',
'click==3.3',
'pyyaml>=3',
|
de1a11e770dc5e3639247cf94d1509ea73aa2554
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='byterestclient',
version='0.1',
packages=find_packages(exclude=['test*']),
url='',
license='',
author='Allard Hoeve',
author_email='allard@byte.nl',
description='A generic REST client',
install_requires=['requests>=2.2.1'],
test_suite="tests",
)
|
from setuptools import setup, find_packages
setup(
name='byterestclient',
version='0.1',
packages=find_packages(exclude=['test*']),
url='https:/github.com/ByteInternet/pythono-byterestclient',
author='Allard Hoeve',
author_email='allard@byte.nl',
description='A generic REST client',
install_requires=['requests'],
test_suite="tests",
)
|
Install any which requests you want
|
Install any which requests you want
|
Python
|
mit
|
ByteInternet/python-byterestclient,ByteInternet/python-byterestclient
|
---
+++
@@ -4,11 +4,10 @@
name='byterestclient',
version='0.1',
packages=find_packages(exclude=['test*']),
- url='',
- license='',
+ url='https:/github.com/ByteInternet/pythono-byterestclient',
author='Allard Hoeve',
author_email='allard@byte.nl',
description='A generic REST client',
- install_requires=['requests>=2.2.1'],
+ install_requires=['requests'],
test_suite="tests",
)
|
ef86ea4a78c6a617c9872762e86198cad7d0a50e
|
setup.py
|
setup.py
|
# coding: utf-8
from setuptools import setup
# lets prepare our initial setup
setup_dict = {'name': 'PYSCSI',
'version': '1.0',
'license': 'LGPLv2.1',
'author': 'Ronnie Sahlberg',
'author_email': 'ronniesahlberg@gmail.com',
'description': 'Module for calling SCSI devices from Python',
'packages': ['pyscsi', 'pyscsi.pyscsi', 'pyscsi.pyiscsi', 'pyscsi.utils'],
'python_requires': '~=3.7',
'extras_require': {'sgio': ['cython-sgio'],
'iscsi': ['cython-iscsi'],
},
}
setup(**setup_dict)
|
# coding: utf-8
from setuptools import find_packages, setup
# lets prepare our initial setup
setup_dict = {'name': 'PYSCSI',
'version': '1.0',
'license': 'LGPLv2.1',
'author': 'Ronnie Sahlberg',
'author_email': 'ronniesahlberg@gmail.com',
'description': 'Module for calling SCSI devices from Python',
'packages': find_packages(),
'python_requires': '~=3.7',
'extras_require': {'sgio': ['cython-sgio'],
'iscsi': ['cython-iscsi'],
},
}
setup(**setup_dict)
|
Use find_packages instead of listing them manually.
|
Use find_packages instead of listing them manually.
|
Python
|
lgpl-2.1
|
rosjat/python-scsi
|
---
+++
@@ -1,6 +1,6 @@
# coding: utf-8
-from setuptools import setup
+from setuptools import find_packages, setup
# lets prepare our initial setup
setup_dict = {'name': 'PYSCSI',
@@ -9,7 +9,7 @@
'author': 'Ronnie Sahlberg',
'author_email': 'ronniesahlberg@gmail.com',
'description': 'Module for calling SCSI devices from Python',
- 'packages': ['pyscsi', 'pyscsi.pyscsi', 'pyscsi.pyiscsi', 'pyscsi.utils'],
+ 'packages': find_packages(),
'python_requires': '~=3.7',
'extras_require': {'sgio': ['cython-sgio'],
'iscsi': ['cython-iscsi'],
|
b786ae0b845374ca42db42ac64322d6aa9e894c5
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='TOPKAPI',
version='0.2dev',
description='SAHG TOPKAPI model implementation',
author='Theo Vischel & Scott Sinclair',
author_email='theo.vischel@hmg.inpg.fr; sinclaird@ukzn.ac.za',
packages=['TOPKAPI', 'TOPKAPI.parameter_utils', 'TOPKAPI.results_analysis'],
package_dir={'TOPKAPI':'lib'}
)
|
from distutils.core import setup
setup(name='TOPKAPI',
version='0.2dev',
description='SAHG TOPKAPI model implementation',
author='Theo Vischel & Scott Sinclair',
author_email='theo.vischel@hmg.inpg.fr; sinclaird@ukzn.ac.za',
packages=['TOPKAPI',
'TOPKAPI.parameter_utils',
'TOPKAPI.results_analysis'],
package_dir={'TOPKAPI':'lib'}
)
|
Reformat to be more pleasing on the eye
|
STY: Reformat to be more pleasing on the eye
|
Python
|
bsd-3-clause
|
sahg/PyTOPKAPI,scottza/PyTOPKAPI
|
---
+++
@@ -5,7 +5,9 @@
description='SAHG TOPKAPI model implementation',
author='Theo Vischel & Scott Sinclair',
author_email='theo.vischel@hmg.inpg.fr; sinclaird@ukzn.ac.za',
- packages=['TOPKAPI', 'TOPKAPI.parameter_utils', 'TOPKAPI.results_analysis'],
+ packages=['TOPKAPI',
+ 'TOPKAPI.parameter_utils',
+ 'TOPKAPI.results_analysis'],
package_dir={'TOPKAPI':'lib'}
)
|
39235bffda1ac908a6b900432a6396d3522635e5
|
setup.py
|
setup.py
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='class_namespaces',
version='0.2.1',
description='Class Namespaces',
long_description=long_description,
url='https://github.com/mwchase/class-namespaces',
author='Max Woerner Chase',
author_email='max.chase@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='class namespaces',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
extras_require={
'test': ['coverage', 'pytest'],
},
)
|
"""A setuptools based setup module.
See:
https://packaging.python.org/en/latest/distributing.html
https://github.com/pypa/sampleproject
"""
# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='class_namespaces',
version='0.3.0',
description='Class Namespaces',
long_description=long_description,
url='https://github.com/mwchase/class-namespaces',
author='Max Woerner Chase',
author_email='max.chase@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Libraries',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='class namespaces',
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
extras_require={
'test': ['coverage', 'pytest'],
},
)
|
Bump version again. Descriptor emulation is more faithful.
|
Bump version again. Descriptor emulation is more faithful.
|
Python
|
mit
|
mwchase/class-namespaces,mwchase/class-namespaces
|
---
+++
@@ -19,7 +19,7 @@
setup(
name='class_namespaces',
- version='0.2.1',
+ version='0.3.0',
description='Class Namespaces',
long_description=long_description,
|
a15855f83d44eee4a8fac3aea97658d8d0051f96
|
setup.py
|
setup.py
|
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
version_classifiers = ['Programming Language :: Python :: %s' % version
for version in ['2', '2.5', '2.6', '2.7']]
other_classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: OS Independent',
'Topic :: Software Development :: Testing',
]
readme_file = open('README', 'rt')
try:
detailed_description = readme_file.read()
finally:
readme_file.close()
setup(
name="nosy",
version="1.1.1",
description="""\
Run the nose test discovery and execution tool whenever a source file
is changed.
""",
long_description=detailed_description,
author="Doug Latornell",
author_email="djl@douglatornell.ca",
url="http://douglatornell.ca/software/python/Nosy/",
license="New BSD License",
classifiers=version_classifiers + other_classifiers,
packages=find_packages(),
entry_points={'console_scripts':['nosy = nosy.nosy:main']}
)
|
from distribute_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
version_classifiers = ['Programming Language :: Python :: %s' % version
for version in ['2', '2.5', '2.6', '2.7']]
other_classifiers = [
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: BSD License',
'Intended Audience :: Developers',
'Environment :: Console',
'Operating System :: OS Independent',
'Topic :: Software Development :: Testing',
]
with open('README', 'rt') as file_obj:
detailed_description = file_obj.read()
with open('CHANGELOG', 'rt') as file_obj:
detailed_description += file_obj.read()
setup(
name="nosy",
version="1.1.1",
description="""\
Run the nose test discovery and execution tool whenever a source file
is changed.
""",
long_description=detailed_description,
author="Doug Latornell",
author_email="djl@douglatornell.ca",
url="http://douglatornell.ca/software/python/Nosy/",
license="New BSD License",
classifiers=version_classifiers + other_classifiers,
packages=find_packages(),
entry_points={'console_scripts':['nosy = nosy.nosy:main']}
)
|
Use context managers to build detailed_description.
|
Use context managers to build detailed_description.
|
Python
|
bsd-3-clause
|
dougbeal/nosy
|
---
+++
@@ -13,11 +13,10 @@
'Topic :: Software Development :: Testing',
]
-readme_file = open('README', 'rt')
-try:
- detailed_description = readme_file.read()
-finally:
- readme_file.close()
+with open('README', 'rt') as file_obj:
+ detailed_description = file_obj.read()
+with open('CHANGELOG', 'rt') as file_obj:
+ detailed_description += file_obj.read()
setup(
name="nosy",
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.