commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
5a4f52348d8174e9cb3c4c0b8bfe0baa50f70f31
|
tests/test_bot.py
|
tests/test_bot.py
|
from mock import patch
import logging
import congressbot
@patch('congressbot.house_collection')
@patch('congressbot.Reddit')
def test_feed_parse(reddit_mock, house_mock):
house_mock.find_one.return_value = False
congressbot.parse()
assert False
|
from mock import patch
import logging
import congressbot
@patch('congressbot.house_collection')
@patch('congressbot.Reddit')
def test_feed_parse(reddit_mock, house_mock):
house_mock.find_one.return_value = False
congressbot.parse()
assert False
def test_google_feed():
# Will need to be updated in the event of lasting world peace
assert congressbot.find_news_stories('war')
|
Add test for google feed
|
Add test for google feed
|
Python
|
unlicense
|
koshea/congressbot
|
---
+++
@@ -9,3 +9,7 @@
house_mock.find_one.return_value = False
congressbot.parse()
assert False
+
+def test_google_feed():
+ # Will need to be updated in the event of lasting world peace
+ assert congressbot.find_news_stories('war')
|
e88a0a27a4960f6b41170cffa0809423987db888
|
tests/test_transpiler.py
|
tests/test_transpiler.py
|
import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except FileNotFoundError:
pass
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
|
import os
import unittest
import transpiler
class TestTranspiler:
def test_transpiler_creates_files_without_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except OSError:
pass
transpiler.main(["--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
def test_transpiler_creates_files_with_format(self):
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
except OSError:
pass
transpiler.main(["--format", "--output-dir", "/tmp"])
assert os.path.isfile("/tmp/auto_functions.cpp")
assert os.path.isfile("/tmp/auto_functions.h")
|
Fix error testing on python 2.7
|
Fix error testing on python 2.7
|
Python
|
mit
|
WesleyAC/lemonscript-transpiler,WesleyAC/lemonscript-transpiler,WesleyAC/lemonscript-transpiler
|
---
+++
@@ -9,7 +9,7 @@
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
- except FileNotFoundError:
+ except OSError:
pass
transpiler.main(["--output-dir", "/tmp"])
@@ -21,7 +21,7 @@
try:
os.remove("/tmp/auto_functions.cpp")
os.remove("/tmp/auto_functions.h")
- except FileNotFoundError:
+ except OSError:
pass
transpiler.main(["--format", "--output-dir", "/tmp"])
|
cddcc7e5735022c7a4faeee5331e7b80a6349406
|
src/functions.py
|
src/functions.py
|
def getTableColumnLabel(c):
label = ''
while True:
label += 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26]
if c <= 26:
break
c = int(c/26)
return label
def parseTableColumnLabel(label):
ret = 0
for c in map(ord, reversed(label)):
if 0x41 <= c <= 0x5A:
ret = ret*26 + (c-0x41)
else:
raise ValueError('Invalid label: %s' % label)
return ret
|
def getTableColumnLabel(c):
label = ''
while True:
label = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26] + label
if c < 26:
break
c = c//26-1
return label
def parseTableColumnLabel(label):
if not label:
raise ValueError('Invalid label: %s' % label)
ret = -1
for c in map(ord, label):
if 0x41 <= c <= 0x5A:
ret = (ret+1)*26 + (c-0x41)
else:
raise ValueError('Invalid label: %s' % label)
return ret
|
Fix (parse|generate) table header label function
|
Fix (parse|generate) table header label function
|
Python
|
mit
|
takumak/tuna,takumak/tuna
|
---
+++
@@ -1,17 +1,19 @@
def getTableColumnLabel(c):
label = ''
while True:
- label += 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26]
- if c <= 26:
+ label = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'[c % 26] + label
+ if c < 26:
break
- c = int(c/26)
+ c = c//26-1
return label
def parseTableColumnLabel(label):
- ret = 0
- for c in map(ord, reversed(label)):
+ if not label:
+ raise ValueError('Invalid label: %s' % label)
+ ret = -1
+ for c in map(ord, label):
if 0x41 <= c <= 0x5A:
- ret = ret*26 + (c-0x41)
+ ret = (ret+1)*26 + (c-0x41)
else:
raise ValueError('Invalid label: %s' % label)
return ret
|
41a59d72049c8c33dc4531df3561186e3852c328
|
pack/util/codec.py
|
pack/util/codec.py
|
import urllib
def url_decode(string, encoding='utf8'):
return urllib.unquote(string)
def url_encode(string, encoding='utf8'):
return urllib.urlencode(string)
|
import urllib
def url_decode(string, encoding='utf8'):
return urllib.unquote_plus(string)
def url_encode(string, encoding='utf8'):
return urllib.urlencode(string)
|
Fix URL decoding (need to use urllib.unquote_plus).
|
Fix URL decoding (need to use urllib.unquote_plus).
|
Python
|
mit
|
adeel/pump
|
---
+++
@@ -1,7 +1,7 @@
import urllib
def url_decode(string, encoding='utf8'):
- return urllib.unquote(string)
+ return urllib.unquote_plus(string)
def url_encode(string, encoding='utf8'):
return urllib.urlencode(string)
|
c4d4ba61d1948bebecfadd540a77603fc9dda204
|
benchfunk/core/plotters.py
|
benchfunk/core/plotters.py
|
import numpy as np
from jug import TaskGenerator
import ezplot
__all__ = ['plot_stack']
@TaskGenerator
def plot_stack(stack_results, problems=None, policies=None, name=''):
problems = problems if problems is not None else stack_results.key()
nfigs = len(problems)
fig = ezplot.figure(figsize=(5*nfigs, 4))
for i, expt in enumerate(problems):
results = stack_results[expt]
policies = policies if policies is not None else results.keys()
ax = fig.add_subplot(1, nfigs, i+1)
for policy in policies:
xbest, ybest = zip(*results[policy])
iters = np.arange(np.shape(ybest)[1])
mu = np.mean(ybest, axis=0)
std = np.std(ybest, axis=0) / np.sqrt(len(ybest))
ax.plot_banded(iters, mu, std, label=policy)
ax.set_title(expt, fontsize=16)
ax.legend(loc=0, fontsize=16)
ezplot.plt.savefig(name)
return fig
|
import matplotlib
matplotlib.use('Agg')
import numpy as np
from jug import TaskGenerator
import ezplot
__all__ = ['plot_stack']
@TaskGenerator
def plot_stack(stack_results, problems=None, policies=None, name=''):
problems = problems if problems is not None else stack_results.key()
nfigs = len(problems)
fig = ezplot.figure(figsize=(5*nfigs, 4))
for i, expt in enumerate(problems):
results = stack_results[expt]
policies = policies if policies is not None else results.keys()
ax = fig.add_subplot(1, nfigs, i+1)
for policy in policies:
xbest, ybest = zip(*results[policy])
iters = np.arange(np.shape(ybest)[1])
mu = np.mean(ybest, axis=0)
std = np.std(ybest, axis=0) / np.sqrt(len(ybest))
ax.plot_banded(iters, mu, std, label=policy)
ax.set_title(expt, fontsize=16)
ax.legend(loc=0, fontsize=16)
ezplot.plt.savefig(name)
return fig
|
Fix plotter to use 'Agg'.
|
Fix plotter to use 'Agg'.
|
Python
|
bsd-2-clause
|
mwhoffman/benchfunk
|
---
+++
@@ -1,3 +1,5 @@
+import matplotlib
+matplotlib.use('Agg')
import numpy as np
from jug import TaskGenerator
import ezplot
|
c970661c4525e0f3a9c77935ccfbef62742b18d4
|
csympy/__init__.py
|
csympy/__init__.py
|
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError, Add,
Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var
|
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError, Add,
Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var
def test():
import pytest, os
return not pytest.cmdline.main(
[os.path.dirname(os.path.abspath(__file__))])
|
Add test function so tests can be run from within python terminal
|
Add test function so tests can be run from within python terminal
import csympy
csympy.test()
|
Python
|
mit
|
symengine/symengine.py,bjodah/symengine.py,bjodah/symengine.py,symengine/symengine.py,symengine/symengine.py,bjodah/symengine.py
|
---
+++
@@ -1,3 +1,8 @@
from .lib.csympy_wrapper import (Symbol, Integer, sympify, SympifyError, Add,
Mul, Pow, sin, cos, sqrt, function_symbol, I)
from .utilities import var
+
+def test():
+ import pytest, os
+ return not pytest.cmdline.main(
+ [os.path.dirname(os.path.abspath(__file__))])
|
fc350215a32586ac2233749924fa61078e8c780a
|
cosmic_ray/testing/unittest_runner.py
|
cosmic_ray/testing/unittest_runner.py
|
from itertools import chain
import unittest
from .test_runner import TestRunner
class UnittestRunner(TestRunner): # pylint:disable=no-init, too-few-public-methods
"""A TestRunner using `unittest`'s discovery mechanisms.
This treats the first element of `test_args` as a directory. This discovers
all tests under that directory and executes them.
All elements in `test_args` after the first are ignored.
"""
def _run(self):
suite = unittest.TestLoader().discover(self.test_args[0])
result = unittest.TestResult()
result.failfast = True
suite.run(result)
return (
result.wasSuccessful(),
[(str(r[0]), r[1])
for r in chain(result.errors,
result.failures)])
|
from itertools import chain
import unittest
from .test_runner import TestRunner
class UnittestRunner(TestRunner): # pylint:disable=no-init, too-few-public-methods
"""A TestRunner using `unittest`'s discovery mechanisms.
This treats the first element of `test_args` as a directory. This discovers
all tests under that directory and executes them.
All elements in `test_args` after the first are ignored.
"""
def _run(self):
suite = unittest.TestLoader().discover(self.test_args[0])
result = unittest.TestResult()
result.failfast = True
suite.run(result)
return (
result.wasSuccessful(),
[r[1] for r in chain(result.errors, result.failures)])
|
Return a list of strings for unittest results, not list of tuples
|
Return a list of strings for unittest results, not list of tuples
This is needed so the reporter can print a nicely formatted
traceback when the job is killed.
|
Python
|
mit
|
sixty-north/cosmic-ray
|
---
+++
@@ -22,6 +22,4 @@
return (
result.wasSuccessful(),
- [(str(r[0]), r[1])
- for r in chain(result.errors,
- result.failures)])
+ [r[1] for r in chain(result.errors, result.failures)])
|
f0ed7130172a3c5c70c2147919b6e213f065c2c2
|
open_journal.py
|
open_journal.py
|
import sublime, sublime_plugin
import os, string
import re
from datetime import date
try:
from MarkdownEditing.wiki_page import *
except ImportError:
from wiki_page import *
try:
from MarkdownEditing.mdeutils import *
except ImportError:
from mdeutils import *
class OpenJournalCommand(MDETextCommand):
def run(self, edit):
print("Running OpenJournalCommand")
today = date.today()
name = today.strftime('%Y-%m-%d')
wiki_page = WikiPage(self.view)
wiki_page.select_page(name)
|
import sublime, sublime_plugin
import os, string
import re
from datetime import date
try:
from MarkdownEditing.wiki_page import *
except ImportError:
from wiki_page import *
try:
from MarkdownEditing.mdeutils import *
except ImportError:
from mdeutils import *
DEFAULT_DATE_FORMAT = '%Y-%m-%d'
class OpenJournalCommand(MDETextCommand):
def run(self, edit):
print("Running OpenJournalCommand")
today = date.today()
date_format = self.view.settings().get("mde.journal.dateformat", DEFAULT_DATE_FORMAT)
name = today.strftime(date_format)
wiki_page = WikiPage(self.view)
wiki_page.select_page(name)
|
Add parameter to choose journal date format
|
Add parameter to choose journal date format
This allows for other journal date formats to be permissible, adding an optional date format parameter to the setting file.
|
Python
|
mit
|
SublimeText-Markdown/MarkdownEditing
|
---
+++
@@ -14,12 +14,14 @@
except ImportError:
from mdeutils import *
-
+DEFAULT_DATE_FORMAT = '%Y-%m-%d'
+
class OpenJournalCommand(MDETextCommand):
def run(self, edit):
print("Running OpenJournalCommand")
today = date.today()
- name = today.strftime('%Y-%m-%d')
+ date_format = self.view.settings().get("mde.journal.dateformat", DEFAULT_DATE_FORMAT)
+ name = today.strftime(date_format)
wiki_page = WikiPage(self.view)
wiki_page.select_page(name)
|
7a2fd849a80db2407fb6c734c02c21a2a9b9a66e
|
forms/management/commands/assign_missing_perms.py
|
forms/management/commands/assign_missing_perms.py
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User, Group
from django.contrib import admin
from gmmp.models import Monitor
from forms.admin import (
RadioSheetAdmin, TwitterSheetAdmin, InternetNewsSheetAdmin,
NewspaperSheetAdmin, TelevisionSheetAdmin)
from forms.models import (
RadioSheet, TwitterSheet, InternetNewsSheet, NewspaperSheet,
TelevisionSheet)
from sheets import (radio_sheets, twitter_sheets, internetnews_sheets,
newspaper_sheets, television_sheets)
class Command(BaseCommand):
def handle(self, *args, **options):
sheet_types = [
(RadioSheet, RadioSheetAdmin, radio_sheets),
(TwitterSheet, TwitterSheetAdmin, twitter_sheets),
(InternetnewsSheet, InternetNewsSheetAdmin, internetnews_sheets),
(NewspaperSheet, NewspaperSheetAdmin, newspaper_sheets),
(TelevisionSheet, TelevisionSheetAdmin, television_sheets)
]
for model, model_admin, sheet_monitor_list in sheet_types:
admin_obj =model_admin(model, admin.site)
for sheet_id, monitor_id in sheet_monitor_list:
user = User.objects.get(monitor__id=monitor_id)
sheet_obj = model.objects.get(id=sheet_id)
admin_obj.assign_permissions(user, sheet_obj)
|
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User, Group
from django.contrib import admin
from gmmp.models import Monitor
from forms.admin import (
RadioSheetAdmin, TwitterSheetAdmin, InternetNewsSheetAdmin,
NewspaperSheetAdmin, TelevisionSheetAdmin)
from forms.models import (
RadioSheet, TwitterSheet, InternetNewsSheet, NewspaperSheet,
TelevisionSheet)
from sheets import (radio_sheets, twitter_sheets, internetnews_sheets,
newspaper_sheets, television_sheets)
sheet_types = [
(RadioSheet, RadioSheetAdmin, radio_sheets),
(TwitterSheet, TwitterSheetAdmin, twitter_sheets),
(InternetnewsSheet, InternetNewsSheetAdmin, internetnews_sheets),
(NewspaperSheet, NewspaperSheetAdmin, newspaper_sheets),
(TelevisionSheet, TelevisionSheetAdmin, television_sheets)
]
class Command(BaseCommand):
def handle(self, *args, **options):
for model, model_admin, sheet_monitor_list in sheet_types:
admin_obj =model_admin(model, admin.site)
for sheet_id, monitor_id in sheet_monitor_list:
user = User.objects.get(monitor__id=monitor_id)
sheet_obj = model.objects.get(id=sheet_id)
admin_obj.assign_permissions(user, sheet_obj)
|
Revert "Clean up a bit"
|
Revert "Clean up a bit"
This reverts commit 4500b571e2fd7a35cf722c3c9cc2fab7ea942cba.
|
Python
|
apache-2.0
|
Code4SA/gmmp,Code4SA/gmmp,Code4SA/gmmp
|
---
+++
@@ -15,17 +15,17 @@
newspaper_sheets, television_sheets)
+sheet_types = [
+ (RadioSheet, RadioSheetAdmin, radio_sheets),
+ (TwitterSheet, TwitterSheetAdmin, twitter_sheets),
+ (InternetnewsSheet, InternetNewsSheetAdmin, internetnews_sheets),
+ (NewspaperSheet, NewspaperSheetAdmin, newspaper_sheets),
+ (TelevisionSheet, TelevisionSheetAdmin, television_sheets)
+]
+
class Command(BaseCommand):
def handle(self, *args, **options):
-
- sheet_types = [
- (RadioSheet, RadioSheetAdmin, radio_sheets),
- (TwitterSheet, TwitterSheetAdmin, twitter_sheets),
- (InternetnewsSheet, InternetNewsSheetAdmin, internetnews_sheets),
- (NewspaperSheet, NewspaperSheetAdmin, newspaper_sheets),
- (TelevisionSheet, TelevisionSheetAdmin, television_sheets)
- ]
for model, model_admin, sheet_monitor_list in sheet_types:
admin_obj =model_admin(model, admin.site)
|
2a99fc24fec47b741359e3118969ba0f4d874e41
|
SettingsObject.py
|
SettingsObject.py
|
"""
This class is used in kaggle competitions
"""
import json
class Settings():
train_path = None
test_path = None
model_path = None
submission_path = None
string_train_path = "TRAIN_DATA_PATH"
string_test_path = "TEST_DATA_PATH"
string_model_path = "MODEL_PATH"
string_submission_path = "SUBMISSION_PATH"
def __init__(self):
self._load_settings()
def __str__(self):
to_print = "\n".join([self.string_train_path,
self.train_path,
self.string_test_path,
self.test_path,
self.string_model_path,
self.model_path,
self.string_submission_path,
self.submission_path])
return to_print
def _load_settings(self):
with open('SETTINGS.json') as json_data:
settings = json.load(json_data)
self.train_path=settings[self.string_train_path]
self.test_path=settings[self.string_test_path]
self.model_path=settings[self.string_model_path]
self.submission_path=settings[self.string_submission_path]
s = Settings()
|
"""
This class is used in kaggle competitions
"""
import json
class Settings():
train_path = None
test_path = None
model_path = None
submission_path = None
string_train_path = "TRAIN_DATA_PATH"
string_test_path = "TEST_DATA_PATH"
string_model_path = "MODEL_PATH"
string_submission_path = "SUBMISSION_PATH"
def __init__(self):
self._load_settings()
def __str__(self):
to_print = "\n".join([self.string_train_path,
self.train_path,
self.string_test_path,
self.test_path,
self.string_model_path,
self.model_path,
self.string_submission_path,
self.submission_path])
return to_print
def _load_settings(self):
with open('SETTINGS.json') as json_data:
settings = json.load(json_data)
self.train_path=settings[self.string_train_path]
self.test_path=settings[self.string_test_path]
self.model_path=settings[self.string_model_path]
self.submission_path=settings[self.string_submission_path]
|
Remove not necessary code in Setting class
|
Remove not necessary code in Setting class
|
Python
|
apache-2.0
|
Gabvaztor/TFBoost
|
---
+++
@@ -34,6 +34,3 @@
self.model_path=settings[self.string_model_path]
self.submission_path=settings[self.string_submission_path]
-
-s = Settings()
-
|
ec032ab20de8d3f4d56d7d6901dd73c2bc2ada56
|
back_end/api.py
|
back_end/api.py
|
from bottle import get, route
import redis
import json
from datetime import datetime
RED = redis.ConnectionPool(host='redis_01',port=6379,db=0)
#RED = redis.ConnectionPool(host='tuchfarber.com',port=6379,db=0)
LENGTH_OF_PREG = 280
@get('/api/test')
def index():
return {'status':'fuck you'}
@get('/api/onthislay/<date>')
def return_date(date):
redis_server = redis.Redis(connection_pool=RED)
data = {}
data['data'] = {}
response = redis_server.lrange('dates:' + date, 0, -1)
data['data']['detail'] = response[0].decode("utf-8")
data['data']['day'] = date
return json.dumps(data)
|
from bottle import get, route
import redis
import json
import sys
import random
from datetime import date, timedelta
#RED = redis.ConnectionPool(host='redis_01',port=6379,db=0)
RED = redis.ConnectionPool(host='tuchfarber.com',port=6379,db=0)
LENGTH_OF_PREG = 280
WEEK = 7
@get('/api/test')
def index():
return {'status':'fuck you'}
@get('/api/onthislay/<date>')
def return_date(sent_date):
#Create the redis connection
redis_server = redis.Redis(connection_pool=RED)
# Init dictionary
data = {}
data['data'] = {}
birthday = get_date(sent_date)
conception = birthday - timedelta(LENGTH_OF_PREG + WEEK)
all_events = {}
for i in range(1, 14):
possible_conception_date = conception + timedelta(i)
sys.stdout.write(possible_conception_date.isoformat() + "\n")
sys.stdout.flush()
response = redis_server.lrange('dates:' +
possible_conception_date.isoformat(),
0, -1)
if len(response) > 0:
data['data']['detail'] = response[0].decode("utf-8")
data['data']['day'] = sent_date
all_events[possible_conception_date] = json.dumps(data)
# key_to_use = random.choice(all_events.keys())
for key, value in all_events.items():
return all_events[key]
# sys.stdout.write('Date: ' + key.isoformat() + "\n")
# sys.stdout.write('Value: ' + value + "\n")
# sys.stdout.flush()
def get_date(sent_date):
splitd = sent_date.split('-')
return date(int(splitd[0]), int(splitd[1]), int(splitd[2]))
|
Return details from possible conception date
|
Return details from possible conception date
|
Python
|
mit
|
tuchfarber/tony-hawkathon-2016,tuchfarber/tony-hawkathon-2016,tuchfarber/tony-hawkathon-2016
|
---
+++
@@ -1,23 +1,54 @@
from bottle import get, route
import redis
import json
-from datetime import datetime
+import sys
+import random
+from datetime import date, timedelta
-RED = redis.ConnectionPool(host='redis_01',port=6379,db=0)
-#RED = redis.ConnectionPool(host='tuchfarber.com',port=6379,db=0)
+#RED = redis.ConnectionPool(host='redis_01',port=6379,db=0)
+RED = redis.ConnectionPool(host='tuchfarber.com',port=6379,db=0)
LENGTH_OF_PREG = 280
+WEEK = 7
@get('/api/test')
def index():
return {'status':'fuck you'}
@get('/api/onthislay/<date>')
-def return_date(date):
+def return_date(sent_date):
+ #Create the redis connection
redis_server = redis.Redis(connection_pool=RED)
+
+ # Init dictionary
data = {}
data['data'] = {}
- response = redis_server.lrange('dates:' + date, 0, -1)
- data['data']['detail'] = response[0].decode("utf-8")
- data['data']['day'] = date
- return json.dumps(data)
+
+
+ birthday = get_date(sent_date)
+ conception = birthday - timedelta(LENGTH_OF_PREG + WEEK)
+
+ all_events = {}
+
+ for i in range(1, 14):
+ possible_conception_date = conception + timedelta(i)
+ sys.stdout.write(possible_conception_date.isoformat() + "\n")
+ sys.stdout.flush()
+ response = redis_server.lrange('dates:' +
+ possible_conception_date.isoformat(),
+ 0, -1)
+ if len(response) > 0:
+ data['data']['detail'] = response[0].decode("utf-8")
+ data['data']['day'] = sent_date
+ all_events[possible_conception_date] = json.dumps(data)
+
+ # key_to_use = random.choice(all_events.keys())
+ for key, value in all_events.items():
+ return all_events[key]
+ # sys.stdout.write('Date: ' + key.isoformat() + "\n")
+ # sys.stdout.write('Value: ' + value + "\n")
+ # sys.stdout.flush()
+def get_date(sent_date):
+ splitd = sent_date.split('-')
+ return date(int(splitd[0]), int(splitd[1]), int(splitd[2]))
+
|
60150d28ed815095cfe16bd7c7170fd4f47cf86e
|
bacman/mysql.py
|
bacman/mysql.py
|
import os
from .bacman import BacMan
class MySQL(BacMan):
"""Take a snapshot of a MySQL DB."""
filename_prefix = os.environ.get('BACMAN_PREFIX', 'mysqldump')
def get_command(self, path):
command_string = "mysqldump -u {user} -p{password} -h {host} {name} > {path}"
command = command_string.format(user=self.user,
password=self.password,
host=self.host,
name=self.name,
path=path)
return command
if __name__ == "__main__":
MySQL()
|
import os
from .bacman import BacMan
class MySQL(BacMan):
"""Take a snapshot of a MySQL DB."""
filename_prefix = os.environ.get('BACMAN_PREFIX', 'mysqldump')
def get_command(self, path):
command_string = "mysqldump -u {user} -p{password} -h {host} {name} > {path}"
command = command_string.format(user=self.user,
password=self.password,
host=self.host,
name=self.name,
path=path)
return command
if __name__ == "__main__":
MySQL()
|
Add whitespace in order to be PEP8 compliant
|
Add whitespace in order to be PEP8 compliant
|
Python
|
bsd-3-clause
|
willandskill/bacman
| |
fe07b97223dfffb789611b8b2cd043b628f8cef6
|
preview.py
|
preview.py
|
from PySide import QtGui, QtCore, QtWebKit
class Preview(QtWebKit.QWebView):
def __init__(self, parent=None):
super(Preview, self).__init__(parent)
self.load(QtCore.QUrl.fromLocalFile("/Users/audreyr/code/pydream-repos/rstpreviewer/testfiles/contributing.html"))
|
from PySide import QtGui, QtCore, QtWebKit
from unipath import Path
class Preview(QtWebKit.QWebView):
def __init__(self, parent=None):
super(Preview, self).__init__(parent)
# TODO: Load HTML from real Sphinx output file
output_html_path = Path("testfiles/contributing.html").absolute()
self.load(QtCore.QUrl.fromLocalFile(output_html_path))
|
Use Unipath for relative paths.
|
Use Unipath for relative paths.
|
Python
|
bsd-3-clause
|
techdragon/sphinx-gui,audreyr/sphinx-gui,techdragon/sphinx-gui,audreyr/sphinx-gui
|
---
+++
@@ -1,7 +1,11 @@
from PySide import QtGui, QtCore, QtWebKit
+from unipath import Path
class Preview(QtWebKit.QWebView):
def __init__(self, parent=None):
super(Preview, self).__init__(parent)
- self.load(QtCore.QUrl.fromLocalFile("/Users/audreyr/code/pydream-repos/rstpreviewer/testfiles/contributing.html"))
+
+ # TODO: Load HTML from real Sphinx output file
+ output_html_path = Path("testfiles/contributing.html").absolute()
+ self.load(QtCore.QUrl.fromLocalFile(output_html_path))
|
c51cdb577a97817569deac68f5f07401eb99cf38
|
pygp/inference/basic.py
|
pygp/inference/basic.py
|
"""
Simple wrapper class for a Basic GP.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
# local imports
from .exact import ExactGP
from ..likelihoods import Gaussian
from ..kernels import SE, Matern
from ..utils.models import Printable
# exported symbols
__all__ = ['BasicGP']
# NOTE: in the definition of the BasicGP class Printable has to come first so
# that we use the __repr__ method defined there and override the base method.
class BasicGP(Printable, ExactGP):
def __init__(self, sn, sf, ell, ndim=None, kernel='SE'):
likelihood = Gaussian(sn)
kernel = (
SE(sf, ell, ndim) if (kernel == 'SE') else
Matern(sf, ell, 1, ndim) if (kernel == 'Matern1') else
Matern(sf, ell, 3, ndim) if (kernel == 'Matern3') else
Matern(sf, ell, 5, ndim) if (kernel == 'Matern5') else None)
if kernel is None:
raise RuntimeError('Unknown kernel type')
super(BasicGP, self).__init__(likelihood, kernel)
def _params(self):
# replace the parameters for the base GP model with a simplified
# structure and rename the likelihood's sigma parameter to sn (ie its
# the sigma corresponding to the noise).
return [('sn', 1)] + self._kernel._params()
|
"""
Simple wrapper class for a Basic GP.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# global imports
import numpy as np
# local imports
from .exact import ExactGP
from ..likelihoods import Gaussian
from ..kernels import SE, Matern
from ..utils.models import Printable
# exported symbols
__all__ = ['BasicGP']
# NOTE: in the definition of the BasicGP class Printable has to come first so
# that we use the __repr__ method defined there and override the base method.
class BasicGP(Printable, ExactGP):
def __init__(self, sn, sf, ell, ndim=None, kernel='se'):
likelihood = Gaussian(sn)
kernel = (
SE(sf, ell, ndim) if (kernel == 'se') else
Matern(sf, ell, 1, ndim) if (kernel == 'matern1') else
Matern(sf, ell, 3, ndim) if (kernel == 'matern3') else
Matern(sf, ell, 5, ndim) if (kernel == 'matern5') else None)
if kernel is None:
raise RuntimeError('Unknown kernel type')
super(BasicGP, self).__init__(likelihood, kernel)
def _params(self):
# replace the parameters for the base GP model with a simplified
# structure and rename the likelihood's sigma parameter to sn (ie its
# the sigma corresponding to the noise).
return [('sn', 1)] + self._kernel._params()
|
Make BasicGP kernel strings lowercase.
|
Make BasicGP kernel strings lowercase.
|
Python
|
bsd-2-clause
|
mwhoffman/pygp
|
---
+++
@@ -24,13 +24,13 @@
# that we use the __repr__ method defined there and override the base method.
class BasicGP(Printable, ExactGP):
- def __init__(self, sn, sf, ell, ndim=None, kernel='SE'):
+ def __init__(self, sn, sf, ell, ndim=None, kernel='se'):
likelihood = Gaussian(sn)
kernel = (
- SE(sf, ell, ndim) if (kernel == 'SE') else
- Matern(sf, ell, 1, ndim) if (kernel == 'Matern1') else
- Matern(sf, ell, 3, ndim) if (kernel == 'Matern3') else
- Matern(sf, ell, 5, ndim) if (kernel == 'Matern5') else None)
+ SE(sf, ell, ndim) if (kernel == 'se') else
+ Matern(sf, ell, 1, ndim) if (kernel == 'matern1') else
+ Matern(sf, ell, 3, ndim) if (kernel == 'matern3') else
+ Matern(sf, ell, 5, ndim) if (kernel == 'matern5') else None)
if kernel is None:
raise RuntimeError('Unknown kernel type')
|
b64e2c30b0b3da3b77295469fac944ae18d4e6dc
|
publish/twitter.py
|
publish/twitter.py
|
"""Twitter delivery mechanism for botfriend."""
from nose.tools import set_trace
import tweepy
from bot import Publisher
class TwitterPublisher(Publisher):
def __init__(
self, bot, full_config, kwargs
):
for key in ['consumer_key', 'consumer_secret', 'access_token',
'access_token_secret']:
if not key in kwargs:
raise ValueError(
"Missing required Twitter configuration key %s" % key
)
auth = tweepy.OAuthHandler(kwargs['consumer_key'], kwargs['consumer_secret'])
auth.set_access_token(kwargs['access_token'], kwargs['access_token_secret'])
self.api = tweepy.API(auth)
def twitter_safe(self, content):
content = unicode(content)
content = unicodedata.normalize('NFC', content)
content = content.encode("utf8")
# TODO: replace initial D., M. etc.
return content[:140]
def publish(self, post, publication):
content = self.twitter_safe(post.content)
# TODO: update_with_media would go here if there were attachments
# on the Post.
try:
response = self.api.update_status(content)
publication.report_success()
except tweepy.error.TweepError, e:
publication.report_failure(e)
Publisher = TwitterPublisher
|
# encoding: utf-8
"""Twitter delivery mechanism for botfriend."""
import re
import unicodedata
from nose.tools import set_trace
import tweepy
from bot import Publisher
class TwitterPublisher(Publisher):
def __init__(
self, bot, full_config, kwargs
):
for key in ['consumer_key', 'consumer_secret', 'access_token',
'access_token_secret']:
if not key in kwargs:
raise ValueError(
"Missing required Twitter configuration key %s" % key
)
auth = tweepy.OAuthHandler(kwargs['consumer_key'], kwargs['consumer_secret'])
auth.set_access_token(kwargs['access_token'], kwargs['access_token_secret'])
self.api = tweepy.API(auth)
def twitter_safe(self, content):
return _twitter_safe(content)
def publish(self, post, publication):
content = self.twitter_safe(post.content)
# TODO: update_with_media would go here if there were attachments
# on the Post.
try:
response = self.api.update_status(content)
publication.report_success()
except tweepy.error.TweepError, e:
publication.report_failure(e)
def _twitter_safe(content):
"""Turn a string into something that won't get rejected by Twitter."""
content = unicode(content)
content = unicodedata.normalize('NFC', content)
for bad, replace in ('D', u'𝙳'), ('M', u'𝙼'):
if any(content.startswith(x) for x in (bad + ' ', bad + '.')):
content = re.compile("^%s" % bad).sub(replace, content)
content = content.encode("utf8")
return content[:140]
Publisher = TwitterPublisher
|
Replace initial D. and M. with similar-looking characters to get around archaic Twitter restriction.
|
Replace initial D. and M. with similar-looking characters to get around archaic Twitter restriction.
|
Python
|
mit
|
leonardr/botfriend
|
---
+++
@@ -1,4 +1,7 @@
+# encoding: utf-8
"""Twitter delivery mechanism for botfriend."""
+import re
+import unicodedata
from nose.tools import set_trace
import tweepy
from bot import Publisher
@@ -17,13 +20,9 @@
auth = tweepy.OAuthHandler(kwargs['consumer_key'], kwargs['consumer_secret'])
auth.set_access_token(kwargs['access_token'], kwargs['access_token_secret'])
self.api = tweepy.API(auth)
-
+
def twitter_safe(self, content):
- content = unicode(content)
- content = unicodedata.normalize('NFC', content)
- content = content.encode("utf8")
- # TODO: replace initial D., M. etc.
- return content[:140]
+ return _twitter_safe(content)
def publish(self, post, publication):
content = self.twitter_safe(post.content)
@@ -34,5 +33,16 @@
publication.report_success()
except tweepy.error.TweepError, e:
publication.report_failure(e)
-
+
+def _twitter_safe(content):
+ """Turn a string into something that won't get rejected by Twitter."""
+ content = unicode(content)
+ content = unicodedata.normalize('NFC', content)
+ for bad, replace in ('D', u'𝙳'), ('M', u'𝙼'):
+ if any(content.startswith(x) for x in (bad + ' ', bad + '.')):
+ content = re.compile("^%s" % bad).sub(replace, content)
+ content = content.encode("utf8")
+ return content[:140]
+
Publisher = TwitterPublisher
+
|
41e426457c93fc5e0a785614c090a24aaf2e37f5
|
py/foxgami/user.py
|
py/foxgami/user.py
|
from . import db
class Users(object):
@classmethod
def get_current(cls):
return {
'data': {
'id': 1,
'type': 'user',
'name': 'Albert Sheu',
'short_name': 'Albert',
'profile_image_url': 'https://google.com'
}
}
|
from . import db
class Users(object):
@classmethod
def get_current(cls):
return {
'data': {
'id': 1,
'type': 'user',
'name': 'Albert Sheu',
'short_name': 'Albert',
'profile_image_url': 'http://flubstep.com/images/sunglasses.jpg'
}
}
|
Make stub image url a real one
|
Make stub image url a real one
|
Python
|
mit
|
flubstep/foxgami.com,flubstep/foxgami.com
|
---
+++
@@ -10,6 +10,6 @@
'type': 'user',
'name': 'Albert Sheu',
'short_name': 'Albert',
- 'profile_image_url': 'https://google.com'
+ 'profile_image_url': 'http://flubstep.com/images/sunglasses.jpg'
}
}
|
171b0c16698b47a6b0771f2ec2de01079c9a8041
|
src/armet/connectors/cyclone/http.py
|
src/armet/connectors/cyclone/http.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
from armet import utils
from armet.http import request, response
class Request(request.Request):
"""Implements the request abstraction for cyclone.
"""
@property
@utils.memoize_single
def method(self):
pass
def __getitem__(self):
pass
def __iter__(self):
pass
def __len__(self):
pass
class Response(response.Response):
"""Implements the response abstraction for cyclone.
"""
|
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
from armet.http import request, response
class Request(request.Request):
"""Implements the request abstraction for cyclone.
"""
def __init__(self, handler):
self.handler = handler
# This is the python request object
self.request = handler.request
@property
def url(self):
return self.request.full_url()
@property
def path(self):
return self.request.path
@path.setter
def path(self, value):
self.request.path = value
@property
def method(self):
return self.request.method
@method.setter
def method(self, value):
self.request.method = value.upper()
def __getitem__(self, name):
return self.request.headers[name]
def __iter__(self):
return iter(self.request.headers)
def __len__(self):
return len(self.request.headers)
def __contains__(self, item):
return item in self.request.headers
class Response(response.Response):
"""Implements the response abstraction for cyclone.
"""
def __init__(self, handler):
self.handler = handler
def __setitem__(self, name, value):
self.handler.set_header(name, value)
def __getitem__(self, name):
# Cyclone doesn't provide a way to get headers normally, so break
# into the private methods to retrieve the header. Note that
# this doesn't retrieve multi-value headers. However, armet should
# handle multi-value wrangling itself.
return self.handler._headers[name]
|
Implement the cyclone request/response object
|
Implement the cyclone request/response object
|
Python
|
mit
|
armet/python-armet
|
---
+++
@@ -1,6 +1,5 @@
# -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals, division
-from armet import utils
from armet.http import request, response
@@ -8,21 +7,58 @@
"""Implements the request abstraction for cyclone.
"""
+ def __init__(self, handler):
+ self.handler = handler
+
+ # This is the python request object
+ self.request = handler.request
+
@property
- @utils.memoize_single
+ def url(self):
+ return self.request.full_url()
+
+ @property
+ def path(self):
+ return self.request.path
+
+ @path.setter
+ def path(self, value):
+ self.request.path = value
+
+ @property
def method(self):
- pass
+ return self.request.method
- def __getitem__(self):
- pass
+ @method.setter
+ def method(self, value):
+ self.request.method = value.upper()
+
+ def __getitem__(self, name):
+ return self.request.headers[name]
def __iter__(self):
- pass
+ return iter(self.request.headers)
def __len__(self):
- pass
+ return len(self.request.headers)
+
+ def __contains__(self, item):
+ return item in self.request.headers
class Response(response.Response):
"""Implements the response abstraction for cyclone.
"""
+
+ def __init__(self, handler):
+ self.handler = handler
+
+ def __setitem__(self, name, value):
+ self.handler.set_header(name, value)
+
+ def __getitem__(self, name):
+ # Cyclone doesn't provide a way to get headers normally, so break
+ # into the private methods to retrieve the header. Note that
+ # this doesn't retrieve multi-value headers. However, armet should
+ # handle multi-value wrangling itself.
+ return self.handler._headers[name]
|
ce9f5551ec7173cc132eb1271e0fc2c1bbfaa7ce
|
apps/worker/src/main/core/node.py
|
apps/worker/src/main/core/node.py
|
from syft.core.node.vm.vm import VirtualMachine
node = VirtualMachine(name="om-vm")
|
from syft.core.node.device.device import Device
from syft.grid.services.vm_management_service import CreateVMService
node = Device(name="om-device")
node.immediate_services_with_reply.append(CreateVMService)
node._register_services() # re-register all services including SignalingService
|
ADD CreateVMService at Device APP
|
ADD CreateVMService at Device APP
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
---
+++
@@ -1,3 +1,6 @@
-from syft.core.node.vm.vm import VirtualMachine
+from syft.core.node.device.device import Device
+from syft.grid.services.vm_management_service import CreateVMService
-node = VirtualMachine(name="om-vm")
+node = Device(name="om-device")
+node.immediate_services_with_reply.append(CreateVMService)
+node._register_services() # re-register all services including SignalingService
|
a4dc87b5a9b555f74efa9bfe2bd16af5340d1199
|
googlesearch/googlesearch.py
|
googlesearch/googlesearch.py
|
#!/usr/bin/python
import json
import urllib
def showsome(searchfor):
query = urllib.urlencode({'q': searchfor})
url = 'http://ajax.googleapis.com/ajax/services/search/web?v=1.0&%s' % query
search_response = urllib.urlopen(url)
search_results = search_response.read()
results = json.loads(search_results)
data = results['responseData']
print 'Total results: %s' % data['cursor']['estimatedResultCount']
hits = data['results']
print 'Top %d hits:' % len(hits)
for h in hits: print ' ', h['url']
print 'For more results, see %s' % data['cursor']['moreResultsUrl']
showsome('gallery.php-systems.com')
|
#!/usr/bin/python
import json
import urllib
import sys
def showsome(searchfor):
query = urllib.urlencode({'q': searchfor})
url = 'http://ajax.googleapis.com/ajax/services/search/web?v=1.0&%s' % query
search_response = urllib.urlopen(url)
search_results = search_response.read()
results = json.loads(search_results)
data = results['responseData']
print 'Total results: %s' % data['cursor']['estimatedResultCount']
hits = data['results']
print 'Top %d hits:' % len(hits)
for h in hits: print ' ', h['url']
print 'For more results, see %s' % data['cursor']['moreResultsUrl']
if __name__ == '__main__':
if len (sys.argv) < 2:
print 'Usage: %s <search term>' % sys.argv[0]
print ' Try using quotes if you want multiple terms'
print ' Eg %s \"test results\"' % sys.argv[0]
sys.exit(1)
showsome(sys.argv[1])
|
Update of the google search code to be a command line program.
|
Update of the google search code to be a command line program.
|
Python
|
apache-2.0
|
phpsystems/code,phpsystems/code
|
---
+++
@@ -1,6 +1,7 @@
#!/usr/bin/python
import json
import urllib
+import sys
def showsome(searchfor):
query = urllib.urlencode({'q': searchfor})
@@ -15,4 +16,11 @@
for h in hits: print ' ', h['url']
print 'For more results, see %s' % data['cursor']['moreResultsUrl']
-showsome('gallery.php-systems.com')
+
+if __name__ == '__main__':
+ if len (sys.argv) < 2:
+ print 'Usage: %s <search term>' % sys.argv[0]
+ print ' Try using quotes if you want multiple terms'
+ print ' Eg %s \"test results\"' % sys.argv[0]
+ sys.exit(1)
+ showsome(sys.argv[1])
|
afb58da6ecc11a1c92d230bc2dcbb06464cc4f32
|
percept/workflows/commands/run_flow.py
|
percept/workflows/commands/run_flow.py
|
"""
Given a config file, run a given workflow
"""
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = 'config_file'
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
|
"""
Given a config file, run a given workflow
"""
from percept.management.commands import BaseCommand
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
from optparse import make_option
import IPython
import logging
log = logging.getLogger(__name__)
class Command(BaseCommand):
args = 'config_file'
option_list = BaseCommand.option_list + (make_option('--shell',
help='Whether or not to load a shell afterwards".'),)
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
if '--shell' in options:
ns = {
'flow' : wrapper.workflow,
'tasks' : wrapper.workflow.tasks
}
IPython.embed(user_ns=ns)
|
Add in a way to start a shell using the results of a workflow
|
Add in a way to start a shell using the results of a workflow
|
Python
|
apache-2.0
|
VikParuchuri/percept,VikParuchuri/percept
|
---
+++
@@ -6,6 +6,8 @@
from percept.utils.registry import registry, find_in_registry
from percept.workflows.base import NaiveWorkflow
from percept.utils.workflow import WorkflowWrapper, WorkflowLoader
+from optparse import make_option
+import IPython
import logging
log = logging.getLogger(__name__)
@@ -13,9 +15,20 @@
class Command(BaseCommand):
args = 'config_file'
+ option_list = BaseCommand.option_list + (make_option('--shell',
+ help='Whether or not to load a shell afterwards".'),)
+
def command(self, *args, **options):
config_file = args[0]
wrapper = WorkflowWrapper(config_file, NaiveWorkflow)
wrapper.run()
+ if '--shell' in options:
+ ns = {
+ 'flow' : wrapper.workflow,
+ 'tasks' : wrapper.workflow.tasks
+ }
+
+ IPython.embed(user_ns=ns)
+
|
3a9359660ff4c782e0de16e8115b754a3e17d7e7
|
inthe_am/taskmanager/models/usermetadata.py
|
inthe_am/taskmanager/models/usermetadata.py
|
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
class UserMetadata(models.Model):
user = models.ForeignKey(
User, related_name="metadata", unique=True, on_delete=models.CASCADE
)
tos_version = models.IntegerField(default=0)
tos_accepted = models.DateTimeField(default=None, null=True,)
privacy_policy_version = models.IntegerField(default=0)
privacy_policy_accepted = models.DateTimeField(default=None, null=True,)
colorscheme = models.CharField(default="dark-yellow-green.theme", max_length=255,)
@property
def tos_up_to_date(self):
return self.tos_version == settings.TOS_VERSION
@property
def privacy_policy_up_to_date(self):
return self.privacy_policy_version == settings.PRIVACY_POLICY_VERSION
@classmethod
def get_for_user(cls, user):
meta, created = UserMetadata.objects.get_or_create(user=user)
return meta
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
from . import TaskStore
if self.tos_up_to_date and self.privacy_policy_up_to_date:
store = TaskStore.get_for_user(self.user)
store.taskd_account.resume()
def __str__(self):
return self.user.username
class Meta:
app_label = "taskmanager"
|
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
class UserMetadata(models.Model):
user = models.OneToOneField(
User, related_name="metadata", on_delete=models.CASCADE
)
tos_version = models.IntegerField(default=0)
tos_accepted = models.DateTimeField(default=None, null=True,)
privacy_policy_version = models.IntegerField(default=0)
privacy_policy_accepted = models.DateTimeField(default=None, null=True,)
colorscheme = models.CharField(default="dark-yellow-green.theme", max_length=255,)
@property
def tos_up_to_date(self):
return self.tos_version == settings.TOS_VERSION
@property
def privacy_policy_up_to_date(self):
return self.privacy_policy_version == settings.PRIVACY_POLICY_VERSION
@classmethod
def get_for_user(cls, user):
meta, created = UserMetadata.objects.get_or_create(user=user)
return meta
def save(self, *args, **kwargs):
super().save(*args, **kwargs)
from . import TaskStore
if self.tos_up_to_date and self.privacy_policy_up_to_date:
store = TaskStore.get_for_user(self.user)
store.taskd_account.resume()
def __str__(self):
return self.user.username
class Meta:
app_label = "taskmanager"
|
Change mapping to avoid warning
|
Change mapping to avoid warning
|
Python
|
agpl-3.0
|
coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am,coddingtonbear/inthe.am
|
---
+++
@@ -4,8 +4,8 @@
class UserMetadata(models.Model):
- user = models.ForeignKey(
- User, related_name="metadata", unique=True, on_delete=models.CASCADE
+ user = models.OneToOneField(
+ User, related_name="metadata", on_delete=models.CASCADE
)
tos_version = models.IntegerField(default=0)
tos_accepted = models.DateTimeField(default=None, null=True,)
|
e1b36955d2a4e3eb4f36d75b4393cd510e3ddcab
|
workshopper/exercises.py
|
workshopper/exercises.py
|
class Exercise(object):
name = None
title = None
def __init__(self, workshop):
self.workshop = workshop
def get_name(self):
return self.name
def get_title(self):
return self.title
|
class Exercise(object):
title = None
def __init__(self, workshop):
self.workshop = workshop
@property
def name(self):
# TODO: Get from file
return ''
|
Add name property to exercise.
|
Add name property to exercise.
|
Python
|
mit
|
pyschool/story
|
---
+++
@@ -1,13 +1,11 @@
class Exercise(object):
- name = None
title = None
def __init__(self, workshop):
self.workshop = workshop
- def get_name(self):
- return self.name
-
- def get_title(self):
- return self.title
+ @property
+ def name(self):
+ # TODO: Get from file
+ return ''
|
81756324744334de39a0b151d9acac9e24774b9d
|
api/management/commands/deleteuselessactivities.py
|
api/management/commands/deleteuselessactivities.py
|
from django.core.management.base import BaseCommand, CommandError
from api import models
from django.db.models import Count, Q
class Command(BaseCommand):
can_import_settings = True
def handle(self, *args, **options):
if 'NR' in args:
print 'Delete activities of N/R cards'
activities = models.Activity.objects.filter(Q(ownedcard__card__rarity='R') | Q(ownedcard__card__rarity='N'))
count = activities.count()
activities.delete()
print ' Deleted %d activities.' % (count)
print 'Delete activities > 50 per user'
accounts = models.Account.objects.all()
for account in accounts:
to_keep = models.Activity.objects.filter(account=account).order_by('-creation')[:50]
to_delete = models.Activity.objects.filter(account=account).exclude(pk__in=to_keep)
count = to_delete.count()
if count > 0:
to_delete.delete()
print ' %s Deleted %d activities.' % (account, count)
|
from django.core.management.base import BaseCommand, CommandError
from api import models
from django.db.models import Count, Q
class Command(BaseCommand):
can_import_settings = True
def handle(self, *args, **options):
if 'NR' in args:
print 'Delete activities of N/R cards'
activities = models.Activity.objects.filter(Q(ownedcard__card__rarity='R') | Q(ownedcard__card__rarity='N'))
count = activities.count()
activities.delete()
print ' Deleted %d activities.' % (count)
print 'Delete activities > 50 per user'
accounts = models.Account.objects.all()
for account in accounts:
to_keep = models.Activity.objects.filter(account=account).order_by('-creation')[:50]
to_delete = models.Activity.objects.filter(account=account).exclude(pk__in=to_keep.values('pk'))
count = to_delete.count()
if count > 0:
to_delete.delete()
print ' %s Deleted %d activities.' % (account, count)
|
Use list of values and not subquery (less efficient but do not use limit)
|
Use list of values and not subquery (less efficient but do not use limit)
|
Python
|
apache-2.0
|
rdsathene/SchoolIdolAPI,rdsathene/SchoolIdolAPI,laurenor/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,laurenor/SchoolIdolAPI,laurenor/SchoolIdolAPI,dburr/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,dburr/SchoolIdolAPI,rdsathene/SchoolIdolAPI,dburr/SchoolIdolAPI
|
---
+++
@@ -18,7 +18,7 @@
accounts = models.Account.objects.all()
for account in accounts:
to_keep = models.Activity.objects.filter(account=account).order_by('-creation')[:50]
- to_delete = models.Activity.objects.filter(account=account).exclude(pk__in=to_keep)
+ to_delete = models.Activity.objects.filter(account=account).exclude(pk__in=to_keep.values('pk'))
count = to_delete.count()
if count > 0:
to_delete.delete()
|
1e45a5d781f426a383721b9c293f3d4b976fabed
|
image_cropping/thumbnail_processors.py
|
image_cropping/thumbnail_processors.py
|
import logging
logger = logging.getLogger(__name__)
def crop_corners(image, box=None, **kwargs):
"""
Crop corners to the selection defined by image_cropping
`box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers.
"""
if not box:
return image
if not isinstance(box, (list, tuple)):
# convert cropping string to a list of integers if necessary
try:
box = map(int, box.split(','))
except ValueError:
# there's garbage in the cropping field, ignore
logger.warning(
'Unable to parse "box" parameter "%s". Ignoring.' % box)
except AttributeError:
pass
if len(box) == 4:
if box[0] < 0:
# a negative first box value indicates that cropping is disabled
return image
width = abs(box[2] - box[0])
height = abs(box[3] - box[1])
if width and height and (width, height) != image.size:
image = image.crop(box)
else:
logger.warning(
'"box" parameter requires four values. Ignoring "%r".' % box)
return image
|
import logging
logger = logging.getLogger(__name__)
def crop_corners(image, box=None, **kwargs):
"""
Crop corners to the selection defined by image_cropping
`box` is a string of the format 'x1,y1,x2,y2' or a four-tuple of integers.
"""
if not box:
return image
if not isinstance(box, (list, tuple)):
# convert cropping string to a list of integers if necessary
try:
box = map(int, box.split(','))
except ValueError:
# there's garbage in the cropping field, ignore
logger.warning(
'Unable to parse "box" parameter "%s". Ignoring.' % box)
except AttributeError:
pass
if len(box) == 4:
if box[0] < 0:
# a negative first box value indicates that cropping is disabled
return image
width = abs(box[2] - box[0])
height = abs(box[3] - box[1])
if width and height and (width, height) != image.size:
image = image.crop(box)
else:
logger.warning(
'"box" parameter requires four values. Ignoring "%r".' % box)
return image
|
Correct typo in documentation of crop_corners
|
Correct typo in documentation of crop_corners
|
Python
|
bsd-3-clause
|
henriquechehad/django-image-cropping,winzard/django-image-cropping,winzard/django-image-cropping,henriquechehad/django-image-cropping,winzard/django-image-cropping,henriquechehad/django-image-cropping
|
---
+++
@@ -8,7 +8,7 @@
"""
Crop corners to the selection defined by image_cropping
- `box` is a string of the format 'x1,y1,x2,y1' or a four-tuple of integers.
+ `box` is a string of the format 'x1,y1,x2,y2' or a four-tuple of integers.
"""
if not box:
return image
|
7892e34e31ebfe7d3aba27bf147b6c669b428c07
|
journal.py
|
journal.py
|
# -*- coding: utf-8 -*-
from flask import Flask
from contextlib import closing
import os, psycopg2
DB_SCHEMA= """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app= Flask(__name__)
app.config['DATABASE']= os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=store'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""Initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
@app.route('/')
def hello():
return u'Hello World!'
if __name__ == '__main__':
app.run(debug=True)
|
# -*- coding: utf-8 -*-
from flask import Flask
from flask import g
from contextlib import closing
import os, psycopg2
DB_SCHEMA= """
DROP TABLE IF EXISTS entries;
CREATE TABLE entries (
id serial PRIMARY KEY,
title VARCHAR (127) NOT NULL,
text TEXT NOT NULL,
created TIMESTAMP NOT NULL
)
"""
app= Flask(__name__)
app.config['DATABASE']= os.environ.get(
'DATABASE_URL', 'dbname=learning_journal user=store'
)
def connect_db():
"""Return a connection to the configured database"""
return psycopg2.connect(app.config['DATABASE'])
def init_db():
"""Initialize the database using DB_SCHEMA
WARNING: executing this function will drop existing tables.
"""
with closing(connect_db()) as db:
db.cursor().execute(DB_SCHEMA)
db.commit()
def get_database_connection():
db= getattr(g, 'db', None)
if g.db is None:
g.db= db= connect_db()
return db
@app.teardown_request
def teardown_request(exception):
db= getattr(g, 'db', None)
if db is not None:
if exception and isinstance(exception, psycopg2.Error):
# if there was a problem with the database, rollback any
# existing transaction
db.rollback()
else:
db.commit()
db.close()
@app.route('/')
def hello():
return u'Hello World!'
if __name__ == '__main__':
app.run(debug=True)
|
Add functionality to establish connection with database, and disallow operation on said database if there is a problem with the connection
|
Add functionality to establish connection with database, and disallow operation on said database if there is a problem with the connection
|
Python
|
mit
|
charlieRode/learning_journal
|
---
+++
@@ -1,5 +1,6 @@
# -*- coding: utf-8 -*-
from flask import Flask
+from flask import g
from contextlib import closing
import os, psycopg2
@@ -31,6 +32,25 @@
db.cursor().execute(DB_SCHEMA)
db.commit()
+def get_database_connection():
+ db= getattr(g, 'db', None)
+ if g.db is None:
+ g.db= db= connect_db()
+ return db
+
+@app.teardown_request
+def teardown_request(exception):
+ db= getattr(g, 'db', None)
+ if db is not None:
+ if exception and isinstance(exception, psycopg2.Error):
+ # if there was a problem with the database, rollback any
+ # existing transaction
+ db.rollback()
+ else:
+ db.commit()
+ db.close()
+
+
@app.route('/')
def hello():
return u'Hello World!'
|
faa0e5fd214151e8b0bb8fb18772807aa020c4bf
|
infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py
|
infrastructure/crowdin/crowdin_bot_python_package/crowdin_bot/get_crowdin_languages.py
|
"""Script to print list of all crowdin language codes for project."""
from crowdin_bot import api
NS_DICT = {
'ns': "urn:oasis:names:tc:xliff:document:1.2"
}
def get_project_languages():
"""Get list of crowdin language codes.
Returns:
(list) list of project crowdin language codes
"""
info_xml = api.api_call_xml("info")
languages = info_xml.find('languages')
translatable_languages = []
for language in languages:
# Check it's not the incontext pseudo language
if language.find("can_translate").text == "1":
translatable_languages.append(language.find('code').text)
return translatable_languages
if __name__ == "__main__":
print('\n'.join(get_project_languages()))
|
"""Script to print list of all crowdin language codes for project."""
from crowdin_bot import api
NS_DICT = {
'ns': "urn:oasis:names:tc:xliff:document:1.2"
}
def get_project_languages():
"""Get list of crowdin language codes.
Returns:
(list) list of project crowdin language codes
"""
active_languages = []
trans_status = api.api_call_json("status")
for language in trans_status:
# Check language has actually had some translation done
if int(language["words_approved"]) > 0:
active_languages.append(language["code"])
return active_languages
if __name__ == "__main__":
for language in get_project_languages():
print(language)
|
Modify crowdin_bot to only include languages that have >0 translations
|
Modify crowdin_bot to only include languages that have >0 translations
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
---
+++
@@ -12,14 +12,14 @@
Returns:
(list) list of project crowdin language codes
"""
- info_xml = api.api_call_xml("info")
- languages = info_xml.find('languages')
- translatable_languages = []
- for language in languages:
- # Check it's not the incontext pseudo language
- if language.find("can_translate").text == "1":
- translatable_languages.append(language.find('code').text)
- return translatable_languages
+ active_languages = []
+ trans_status = api.api_call_json("status")
+ for language in trans_status:
+ # Check language has actually had some translation done
+ if int(language["words_approved"]) > 0:
+ active_languages.append(language["code"])
+ return active_languages
if __name__ == "__main__":
- print('\n'.join(get_project_languages()))
+ for language in get_project_languages():
+ print(language)
|
d5cfb72626d486276af842b152d19c19d6d7b58c
|
bika/lims/subscribers/objectmodified.py
|
bika/lims/subscribers/objectmodified.py
|
from Products.CMFCore.utils import getToolByName
def ObjectModifiedEventHandler(obj, event):
""" Various types need automation on edit.
"""
if not hasattr(obj, 'portal_type'):
return
if obj.portal_type == 'Calculation':
pr = getToolByName(obj, 'portal_repository')
uc = getToolByName(obj, 'uid_catalog')
obj = uc(UID=obj.UID())[0].getObject()
backrefs = obj.getBackReferences('AnalysisServiceCalculation')
for i, service in enumerate(backrefs):
service = uc(UID=service.UID())[0].getObject()
pr.save(obj=service, comment="Calculation updated to version %s"%
(obj.version_id+1,))
service.reference_versions[obj.UID()] = obj.version_id + 1
elif obj.portal_type == 'Client':
# When modifying these values, keep in sync with setuphandlers.py
mp = obj.manage_permission
mp(permissions.ListFolderContents, ['Manager', 'LabManager', 'LabClerk', 'Analyst', 'Sampler', 'Preserver'], 0)
mp(permissions.View, ['Manager', 'LabManager', 'LabClerk', 'Analyst', 'Sampler', 'Preserver'], 0)
mp('Access contents information', ['Manager', 'LabManager', 'LabClerk', 'Analyst', 'Sampler', 'Preserver', 'Owner'], 0)
|
from Products.CMFCore.utils import getToolByName
from Products.CMFCore import permissions
def ObjectModifiedEventHandler(obj, event):
""" Various types need automation on edit.
"""
if not hasattr(obj, 'portal_type'):
return
if obj.portal_type == 'Calculation':
pr = getToolByName(obj, 'portal_repository')
uc = getToolByName(obj, 'uid_catalog')
obj = uc(UID=obj.UID())[0].getObject()
backrefs = obj.getBackReferences('AnalysisServiceCalculation')
for i, service in enumerate(backrefs):
service = uc(UID=service.UID())[0].getObject()
pr.save(obj=service, comment="Calculation updated to version %s" %
(obj.version_id + 1,))
service.reference_versions[obj.UID()] = obj.version_id + 1
elif obj.portal_type == 'Client':
# When modifying these values, keep in sync with setuphandlers.py
mp = obj.manage_permission
mp(permissions.ListFolderContents, ['Manager', 'LabManager', 'LabClerk', 'Analyst', 'Sampler', 'Preserver'], 0)
mp(permissions.View, ['Manager', 'LabManager', 'LabClerk', 'Analyst', 'Sampler', 'Preserver'], 0)
mp('Access contents information', ['Manager', 'LabManager', 'LabClerk', 'Analyst', 'Sampler', 'Preserver', 'Owner'], 0)
|
Fix missing import in Client modified subscriber
|
Fix missing import in Client modified subscriber
|
Python
|
agpl-3.0
|
anneline/Bika-LIMS,anneline/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,DeBortoliWines/Bika-LIMS,rockfruit/bika.lims,veroc/Bika-LIMS,rockfruit/bika.lims,labsanmartin/Bika-LIMS,labsanmartin/Bika-LIMS,labsanmartin/Bika-LIMS,DeBortoliWines/Bika-LIMS,DeBortoliWines/Bika-LIMS,anneline/Bika-LIMS
|
---
+++
@@ -1,4 +1,6 @@
from Products.CMFCore.utils import getToolByName
+from Products.CMFCore import permissions
+
def ObjectModifiedEventHandler(obj, event):
""" Various types need automation on edit.
@@ -13,9 +15,10 @@
backrefs = obj.getBackReferences('AnalysisServiceCalculation')
for i, service in enumerate(backrefs):
service = uc(UID=service.UID())[0].getObject()
- pr.save(obj=service, comment="Calculation updated to version %s"%
- (obj.version_id+1,))
+ pr.save(obj=service, comment="Calculation updated to version %s" %
+ (obj.version_id + 1,))
service.reference_versions[obj.UID()] = obj.version_id + 1
+
elif obj.portal_type == 'Client':
# When modifying these values, keep in sync with setuphandlers.py
mp = obj.manage_permission
|
13a698e9ca9c46e31fa369af811a68e705571aca
|
tests/test_installation.py
|
tests/test_installation.py
|
"""
Role tests
"""
from testinfra.utils.ansible_runner import AnsibleRunner
testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all')
def test_packages(host):
"""
Check if packages are installed
"""
packages = []
if host.system_info.distribution == 'debian':
packages = [
'locales',
]
elif host.system_info.distribution == 'ubuntu':
packages = [
'locales',
'language-pack-fr',
]
for package in packages:
assert host.package(package).is_installed
|
"""
Role tests
"""
from testinfra.utils.ansible_runner import AnsibleRunner
testinfra_hosts = AnsibleRunner('.molecule/ansible_inventory').get_hosts('all')
def test_packages(host):
"""
Check if packages are installed
"""
packages = []
if host.system_info.distribution == 'debian':
packages = [
'locales',
]
elif host.system_info.distribution == 'ubuntu':
packages = [
'locales',
'language-pack-en',
]
for package in packages:
assert host.package(package).is_installed
|
Update tests with new default language
|
Update tests with new default language
|
Python
|
mit
|
infOpen/ansible-role-locales
|
---
+++
@@ -21,7 +21,7 @@
elif host.system_info.distribution == 'ubuntu':
packages = [
'locales',
- 'language-pack-fr',
+ 'language-pack-en',
]
for package in packages:
|
0cb3aa5947b5c5da802c05ae16bc138441c2c921
|
accounts/views.py
|
accounts/views.py
|
from django.shortcuts import render
def index(request):
if not request.user.is_authenticated():
return render(request, 'account/index.html')
else:
return render(request, 'account/user_home.html')
|
from django.core.urlresolvers import reverse
from django.shortcuts import redirect, render
def index(request):
if not request.user.is_authenticated():
return render(request, 'account/index.html')
else:
return redirect(reverse('quizzes:index'))
|
Use quiz index as user home temporarily
|
Use quiz index as user home temporarily
|
Python
|
mit
|
lockhawksp/beethoven,lockhawksp/beethoven
|
---
+++
@@ -1,4 +1,5 @@
-from django.shortcuts import render
+from django.core.urlresolvers import reverse
+from django.shortcuts import redirect, render
def index(request):
@@ -6,4 +7,4 @@
return render(request, 'account/index.html')
else:
- return render(request, 'account/user_home.html')
+ return redirect(reverse('quizzes:index'))
|
b02c5736a6a0875da7e7feeaa433f4870d1f4bca
|
indra/sources/eidos/eidos_reader.py
|
indra/sources/eidos/eidos_reader.py
|
from indra.java_vm import autoclass, JavaException
from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.jsonAST(mentions)
json_dict = get_python_json(mentions_json)
return json_dict
|
import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
|
Simplify Eidos reader, use Eidos JSON String call
|
Simplify Eidos reader, use Eidos JSON String call
|
Python
|
bsd-2-clause
|
sorgerlab/belpy,bgyori/indra,pvtodorov/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,sorgerlab/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/indra,johnbachman/indra,johnbachman/indra,bgyori/indra,pvtodorov/indra
|
---
+++
@@ -1,5 +1,5 @@
+import json
from indra.java_vm import autoclass, JavaException
-from .scala_utils import get_python_json
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
@@ -36,7 +36,7 @@
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
- mentions_json = ser.jsonAST(mentions)
- json_dict = get_python_json(mentions_json)
+ mentions_json = ser.toJsonStr(mentions)
+ json_dict = json.loads(mentions_json)
return json_dict
|
f34d0d43311e51bcb04c5cbdf5bb31b7a8093feb
|
pyconde/tagging.py
|
pyconde/tagging.py
|
"""
This abstracts some of the functionality provided by django-taggit in order
to normalize the tags provided by the users.
"""
from taggit import managers as taggit_managers
def _normalize_tag(t):
if isinstance(t, unicode):
return t.lower()
return t
class _TaggableManager(taggit_managers._TaggableManager):
def add(self, *tags):
return super(_TaggableManager, self).add(*[
_normalize_tag(t) for t in tags])
class TaggableManager(taggit_managers.TaggableManager):
def __get__(self, instance, model):
if instance is not None and instance.pk is None:
raise ValueError("%s objects need to have a primary key value "
"before you can access their tags." % model.__name__)
manager = _TaggableManager(
through=self.through, model=model, instance=instance
)
return manager
|
"""
This abstracts some of the functionality provided by django-taggit in order
to normalize the tags provided by the users.
"""
from taggit import managers as taggit_managers
def _normalize_tag(t):
if isinstance(t, unicode):
return t.lower()
return t
class _TaggableManager(taggit_managers._TaggableManager):
def add(self, *tags):
return super(_TaggableManager, self).add(*[
_normalize_tag(t) for t in tags])
class TaggableManager(taggit_managers.TaggableManager):
def __get__(self, instance, model):
if instance is not None and instance.pk is None:
raise ValueError("%s objects need to have a primary key value "
"before you can access their tags." % model.__name__)
manager = _TaggableManager(
through=self.through,
model=model,
instance=instance,
prefetch_cache_name=self.name
)
return manager
|
Fix regression introduced by updating taggit (27971d6eed)
|
Fix regression introduced by updating taggit (27971d6eed)
django-taggit 0.11+ introduced support for prefetch_related which breaks
our taggit wrapping: alex/django-taggit@4f2e47f833
|
Python
|
bsd-3-clause
|
pysv/djep,pysv/djep,EuroPython/djep,pysv/djep,pysv/djep,pysv/djep,EuroPython/djep,EuroPython/djep,EuroPython/djep
|
---
+++
@@ -24,6 +24,9 @@
raise ValueError("%s objects need to have a primary key value "
"before you can access their tags." % model.__name__)
manager = _TaggableManager(
- through=self.through, model=model, instance=instance
+ through=self.through,
+ model=model,
+ instance=instance,
+ prefetch_cache_name=self.name
)
return manager
|
65d8715705e07dc7f091e2da47a7ada923c6cfbb
|
release.py
|
release.py
|
"""
Setuptools is released using 'jaraco.packaging.release'. To make a release,
install jaraco.packaging and run 'python -m jaraco.packaging.release'
"""
import os
import subprocess
import pkg_resources
pkg_resources.require('jaraco.packaging>=2.0')
pkg_resources.require('wheel')
def before_upload():
BootstrapBookmark.add()
def after_push():
os.remove('CHANGES (links).txt')
BootstrapBookmark.push()
files_with_versions = (
'ez_setup.py', 'setuptools/version.py',
)
# bdist_wheel must be included or pip will break
dist_commands = 'sdist', 'bdist_wheel'
test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
class BootstrapBookmark:
name = 'bootstrap'
@classmethod
def add(cls):
cmd = ['hg', 'bookmark', '-i', cls.name, '-f']
subprocess.Popen(cmd)
@classmethod
def push(cls):
"""
Push the bootstrap bookmark
"""
push_command = ['hg', 'push', '-B', cls.name]
# don't use check_call here because mercurial will return a non-zero
# code even if it succeeds at pushing the bookmark (because there are
# no changesets to be pushed). !dm mercurial
subprocess.call(push_command)
|
"""
Setuptools is released using 'jaraco.packaging.release'. To make a release,
install jaraco.packaging and run 'python -m jaraco.packaging.release'
"""
import os
import subprocess
import pkg_resources
pkg_resources.require('jaraco.packaging>=2.0')
pkg_resources.require('wheel')
def before_upload():
BootstrapBookmark.add()
def after_push():
BootstrapBookmark.push()
files_with_versions = (
'ez_setup.py', 'setuptools/version.py',
)
# bdist_wheel must be included or pip will break
dist_commands = 'sdist', 'bdist_wheel'
test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools"
os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1"
class BootstrapBookmark:
name = 'bootstrap'
@classmethod
def add(cls):
cmd = ['hg', 'bookmark', '-i', cls.name, '-f']
subprocess.Popen(cmd)
@classmethod
def push(cls):
"""
Push the bootstrap bookmark
"""
push_command = ['hg', 'push', '-B', cls.name]
# don't use check_call here because mercurial will return a non-zero
# code even if it succeeds at pushing the bookmark (because there are
# no changesets to be pushed). !dm mercurial
subprocess.call(push_command)
|
Remove lingering reference to linked changelog.
|
Remove lingering reference to linked changelog.
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
---
+++
@@ -17,7 +17,6 @@
def after_push():
- os.remove('CHANGES (links).txt')
BootstrapBookmark.push()
files_with_versions = (
|
ae3d94fbc9a53df6bbeb0fedf6bb660ba6cd4b40
|
rpy2_helpers.py
|
rpy2_helpers.py
|
#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
from rpy2.robjects import DataFrame
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
|
#! /usr/bin/env python2.7
"""Avoid some boilerplate rpy2 usage code with helpers.
Mostly I wrote this so that I can use xyplot without having
to remember a lot of details.
"""
import click
from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
grdevices = importr('grDevices')
lattice = importr('lattice')
rprint = globalenv.get("print")
def xyplot(formula, data, **kwargs):
if not isinstance(formula, Formula):
formula = Formula(formula)
plot = lattice.xyplot(
formula, data, **kwargs)
rprint(plot)
@click.command()
def main():
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
x = np.random.random_integers(0, 100, 100)
x.sort()
y = np.square(x)
xyplot('y ~ x', DataFrame({'x': x, 'y': y}))
raw_input('Hit enter to exit.')
grdevices.dev_off()
if __name__ == '__main__':
main()
|
Make DataFrame available to module user
|
Make DataFrame available to module user
|
Python
|
mit
|
ecashin/rpy2_helpers
|
---
+++
@@ -7,7 +7,7 @@
"""
import click
-from rpy2.robjects import Formula, globalenv
+from rpy2.robjects import DataFrame, Formula, globalenv
from rpy2.robjects.packages import importr
@@ -29,7 +29,6 @@
import numpy as np
from rpy2.robjects import numpy2ri
numpy2ri.activate()
- from rpy2.robjects import DataFrame
x = np.random.random_integers(0, 100, 100)
x.sort()
|
13d1895a979cfb210e097e4d471238bf36c88c65
|
website/db_create.py
|
website/db_create.py
|
#!/usr/bin/env python3
from database import db
from database import bdb
from database import bdb_refseq
from import_data import import_data
import argparse
def restet_relational_db():
print('Removing relational database...')
db.reflect()
db.drop_all()
print('Removing relational database completed.')
print('Recreating relational database...')
db.create_all()
print('Recreating relational database completed.')
def reset_mappings_db():
print('Removing mappigns database...')
bdb.reset()
bdb_refseq.reset()
print('Removing mapings database completed.')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'-i',
'--import_mappings',
default=False,
help='Should mappings be (re)imported'
)
parser.add_argument(
'-r',
'--reload_relational',
default=False,
help='Should relational database be (re)imported'
)
args = parser.parse_args()
if args.import_mappings:
reset_mappings_db()
if args.reload_relational:
restet_relational_db()
if args.reload_relational or args.import_mappings:
print('Importing data')
import_data(
import_mappings=args.import_mappings,
reload_relational=args.reload_relational
)
print('Importing completed')
print('Done, all tasks completed.')
else:
print('This script should be run from command line')
|
#!/usr/bin/env python3
from database import db
from database import bdb
from database import bdb_refseq
from import_data import import_data
import argparse
def restet_relational_db():
print('Removing relational database...')
db.reflect()
db.drop_all()
print('Removing relational database completed.')
print('Recreating relational database...')
db.create_all()
print('Recreating relational database completed.')
def reset_mappings_db():
print('Removing mappigns database...')
bdb.reset()
bdb_refseq.reset()
print('Removing mapings database completed.')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'-i',
'--import_mappings',
action='store_true',
help='Should mappings be (re)imported'
)
parser.add_argument(
'-r',
'--reload_relational',
action='store_true',
help='Should relational database be (re)imported'
)
args = parser.parse_args()
if args.import_mappings:
reset_mappings_db()
if args.reload_relational:
restet_relational_db()
if args.reload_relational or args.import_mappings:
print('Importing data')
import_data(
import_mappings=args.import_mappings,
reload_relational=args.reload_relational
)
print('Importing completed')
print('Done, all tasks completed.')
else:
print('This script should be run from command line')
|
Use store true in db creation script
|
Use store true in db creation script
|
Python
|
lgpl-2.1
|
reimandlab/ActiveDriverDB,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualisation-Framework-for-Genome-Mutations,reimandlab/Visualistion-Framework-for-Genome-Mutations,reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB,reimandlab/ActiveDriverDB
|
---
+++
@@ -30,13 +30,13 @@
parser.add_argument(
'-i',
'--import_mappings',
- default=False,
+ action='store_true',
help='Should mappings be (re)imported'
)
parser.add_argument(
'-r',
'--reload_relational',
- default=False,
+ action='store_true',
help='Should relational database be (re)imported'
)
|
e12eb10d699fce8e0081acc44025035f703b4dc5
|
crits/core/user_migrate.py
|
crits/core/user_migrate.py
|
def migrate_user(self):
"""
Migrate to latest schema version.
"""
migrate_1_to_2(self)
migrate_2_to_3(self)
def migrate_1_to_2(self):
"""
Migrate from schema 1 to schema 2.
"""
if self.schema_version == 1:
self.schema_version = 2
notify_email = getattr(self.unsupported_attrs, 'email_notifications', False)
theme = getattr(self.unsupported_attrs, 'theme', 'default')
pagesize = getattr(self.unsupported_attrs, 'jtable_list_size', 25)
for k in ('email_notifications', 'theme', 'jtable_list_size'):
setattr(self.unsupported_attrs, k, None)
setattr(self.prefs, 'ui', {'theme': theme, 'table_page_size': pagesize})
setattr(self.prefs,'notify', {'email': notify_email})
self.save()
self.reload()
def migrate_2_to_3(self):
"""
Migrate from schema 2 to schema 3.
"""
if self.schema_version == 2:
self.schema_version = 3
self.favorites['Backdoor'] = []
self.save()
self.reload()
|
def migrate_user(self):
"""
Migrate to latest schema version.
"""
migrate_1_to_2(self)
migrate_2_to_3(self)
def migrate_1_to_2(self):
"""
Migrate from schema 1 to schema 2.
"""
if self.schema_version == 1:
self.schema_version = 2
notify_email = getattr(self.unsupported_attrs, 'email_notifications', False)
theme = getattr(self.unsupported_attrs, 'theme', 'default')
pagesize = getattr(self.unsupported_attrs, 'jtable_list_size', 25)
for k in ('email_notifications', 'theme', 'jtable_list_size'):
setattr(self.unsupported_attrs, k, None)
setattr(self.prefs, 'ui', {'theme': theme, 'table_page_size': pagesize})
setattr(self.prefs,'notify', {'email': notify_email})
self.save()
self.reload()
def migrate_2_to_3(self):
"""
Migrate from schema 2 to schema 3.
"""
if self.schema_version == 2:
self.schema_version = 3
self.favorites['Backdoor'] = []
self.favorites['Exploit'] = []
self.save()
self.reload()
|
Add default exploit to user migration.
|
Add default exploit to user migration.
|
Python
|
mit
|
cdorer/crits,DukeOfHazard/crits,korrosivesec/crits,Lambdanaut/crits,DukeOfHazard/crits,jinverar/crits,jinverar/crits,jhuapl-marti/marti,korrosivesec/crits,Magicked/crits,kaoscoach/crits,blaquee/crits,kaoscoach/crits,blaquee/crits,ckane/crits,HardlyHaki/crits,Magicked/crits,cdorer/crits,korrosivesec/crits,cfossace/crits,lakiw/cripts,kaoscoach/crits,dreardon/crits,cdorer/crits,dreardon/crits,dreardon/crits,jhuapl-marti/marti,ckane/crits,lakiw/cripts,Magicked/crits,kaoscoach/crits,blaquee/crits,jinverar/crits,jhuapl-marti/marti,Magicked/crits,DukeOfHazard/crits,korrosivesec/crits,jinverar/crits,0x3a/crits,Lambdanaut/crits,lakiw/cripts,cfossace/crits,HardlyHaki/crits,cfossace/crits,0x3a/crits,cdorer/crits,HardlyHaki/crits,HardlyHaki/crits,ckane/crits,Lambdanaut/crits,jhuapl-marti/marti,cfossace/crits,Lambdanaut/crits,0x3a/crits,0x3a/crits,blaquee/crits,DukeOfHazard/crits,lakiw/cripts,ckane/crits,dreardon/crits
|
---
+++
@@ -36,6 +36,7 @@
self.schema_version = 3
self.favorites['Backdoor'] = []
+ self.favorites['Exploit'] = []
self.save()
self.reload()
|
f09f33a6ddf0cf397838068e9cc3bc82464bf699
|
labelme/labelme/spiders/__init__.py
|
labelme/labelme/spiders/__init__.py
|
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
|
# This package will contain the spiders of your Scrapy project
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
import scrapy
ANNOTATION_URL = 'http://people.csail.mit.edu/brussell/research/LabelMe/Annotations/'
IMG_URL = 'http://people.csail.mit.edu/brussell/research/LabelMe/Images/'
class AnnotationSpider(scrapy.Spider):
name = 'annotations'
start_urls = [ANNOTATION_URL]
def parse_annotation(self, response):
pass
def parse(self, response):
pass
class ImageSpider(scrapy.Spider):
name = 'images'
start_urls = [IMG_URL]
def parse_image(self, response):
pass
def parse(self, response):
pass
|
Add a scaffold for spiders to crawl annotations and images
|
Add a scaffold for spiders to crawl annotations and images
|
Python
|
mit
|
paopow/LabelMeCrawler
|
---
+++
@@ -2,3 +2,29 @@
#
# Please refer to the documentation for information on how to create and manage
# your spiders.
+import scrapy
+
+ANNOTATION_URL = 'http://people.csail.mit.edu/brussell/research/LabelMe/Annotations/'
+IMG_URL = 'http://people.csail.mit.edu/brussell/research/LabelMe/Images/'
+
+
+class AnnotationSpider(scrapy.Spider):
+ name = 'annotations'
+ start_urls = [ANNOTATION_URL]
+
+ def parse_annotation(self, response):
+ pass
+
+ def parse(self, response):
+ pass
+
+
+class ImageSpider(scrapy.Spider):
+ name = 'images'
+ start_urls = [IMG_URL]
+
+ def parse_image(self, response):
+ pass
+
+ def parse(self, response):
+ pass
|
ba8de67d006c461b736f98f2bb1fcb876ec06830
|
svs_interface.py
|
svs_interface.py
|
#!/usr/bin/env python
import subprocess
from Tkinter import *
from tkFileDialog import *
import os
class GpgApp(object):
def __init__(self, master):
frame = Frame(master)
frame.pack()
self.text = Text()
self.text.pack()
menu = Menu(master)
root.config(menu=menu)
filemenu = Menu(menu, tearoff=0)
menu.add_cascade(label="File", menu=filemenu)
filemenu.add_command(label="Open", command=self.filename_open)
filemenu.add_separator()
filemenu.add_command(label="Exit", command=self.do_exit)
def filename_open(self):
fin = askopenfilenames()
if fin:
self.text.insert(END,fin)
return fin
def do_exit(self):
root.destroy()
root = Tk()
root.title("a simple GnuPG interface")
app = GpgApp(root)
root.mainloop()
|
#!/usr/bin/env python
import subprocess
from Tkinter import *
from tkFileDialog import *
import os
GPG = 'gpg2'
SERVER_KEY = '' # replace with gpg key ID of server key
class GpgApp(object):
def __init__(self, master):
frame = Frame(master)
frame.pack()
self.text = Text()
self.text.pack()
menu = Menu(master)
root.config(menu=menu)
filemenu = Menu(menu, tearoff=0)
menu.add_cascade(label="File", menu=filemenu)
filemenu.add_command(label="Open", command=self.filename_open)
filemenu.add_separator()
filemenu.add_command(label="Exit", command=self.do_exit)
def filename_open(self):
fin = askopenfilenames()
if fin:
self.text.insert(END,fin)
return fin
def encrypt_file(self, input_file, output_file, recipient):
args = [GPG, '--output', output_file, '--recipient', recipient, '-sea', input_file]
subprocess.call(args)
def do_exit(self):
root.destroy()
root = Tk()
root.title("a simple GnuPG interface")
app = GpgApp(root)
root.mainloop()
|
Add method to encrypt files
|
Add method to encrypt files
|
Python
|
agpl-3.0
|
mark-in/securedrop-prov-upstream,mark-in/securedrop-prov-upstream,mark-in/securedrop-prov-upstream,mark-in/securedrop-prov-upstream
|
---
+++
@@ -4,6 +4,9 @@
from Tkinter import *
from tkFileDialog import *
import os
+
+GPG = 'gpg2'
+SERVER_KEY = '' # replace with gpg key ID of server key
class GpgApp(object):
def __init__(self, master):
@@ -24,6 +27,9 @@
if fin:
self.text.insert(END,fin)
return fin
+ def encrypt_file(self, input_file, output_file, recipient):
+ args = [GPG, '--output', output_file, '--recipient', recipient, '-sea', input_file]
+ subprocess.call(args)
def do_exit(self):
root.destroy()
|
70fb3744c07d14e5796e62992775cc97046f60ce
|
package/scripts/ambari_helpers.py
|
package/scripts/ambari_helpers.py
|
from resource_management import *
import os
def create_hdfs_dir(path, owner, perms):
Execute('hadoop fs -mkdir -p '+path, user='hdfs')
Execute('hadoop fs -chown ' + owner + ' ' + path, user='hdfs')
Execute('hadoop fs -chmod ' + perms + ' ' + path, user='hdfs')
def package(name):
import params
Execute(params.package_mgr + ' install -y ' + name, user='root')
def add_repo(source, dest):
import params
if not os.path.isfile(dest + params.repo_file):
Execute('cp ' + source + ' ' + dest)
Execute(params.key_cmd)
Execute(params.cache_cmd)
def cdap_config():
import params
# We're only setup for *NIX, for now
Directory( params.etc_prefix_dir,
mode=0755
)
Directory( params.cdap_conf_dir,
owner = params.cdap_user,
group = params.user_group,
recursive = True
)
XmlConfig( "cdap-site.xml",
conf_dir = params.cdap_conf_dir,
configurations = params.config['configurations']['cdap-site'],
configuration_attributes=params.config['configuration_attributes']['cdap-site'],
owner = params.cdap_user,
group = params.user_group
)
File(format("{cdap_conf_dir}/cdap-env.sh"),
owner = params.cdap_user,
content=InlineTemplate(params.cdap_env_sh_template)
)
|
from resource_management import *
import os
def create_hdfs_dir(path, owner, perms):
Execute('hadoop fs -mkdir -p '+path, user='hdfs')
Execute('hadoop fs -chown ' + owner + ' ' + path, user='hdfs')
Execute('hadoop fs -chmod ' + str(perms) + ' ' + path, user='hdfs')
def package(name):
import params
Execute(params.package_mgr + ' install -y ' + name, user='root')
def add_repo(source, dest):
import params
if not os.path.isfile(dest + params.repo_file):
Execute('cp ' + source + ' ' + dest)
Execute(params.key_cmd)
Execute(params.cache_cmd)
def cdap_config():
import params
# We're only setup for *NIX, for now
Directory( params.etc_prefix_dir,
mode=0755
)
Directory( params.cdap_conf_dir,
owner = params.cdap_user,
group = params.user_group,
recursive = True
)
XmlConfig( "cdap-site.xml",
conf_dir = params.cdap_conf_dir,
configurations = params.config['configurations']['cdap-site'],
configuration_attributes=params.config['configuration_attributes']['cdap-site'],
owner = params.cdap_user,
group = params.user_group
)
File(format("{cdap_conf_dir}/cdap-env.sh"),
owner = params.cdap_user,
content=InlineTemplate(params.cdap_env_sh_template)
)
|
Convert perms to a string
|
Convert perms to a string
|
Python
|
apache-2.0
|
cdapio/cdap-ambari-service,cdapio/cdap-ambari-service
|
---
+++
@@ -4,7 +4,7 @@
def create_hdfs_dir(path, owner, perms):
Execute('hadoop fs -mkdir -p '+path, user='hdfs')
Execute('hadoop fs -chown ' + owner + ' ' + path, user='hdfs')
- Execute('hadoop fs -chmod ' + perms + ' ' + path, user='hdfs')
+ Execute('hadoop fs -chmod ' + str(perms) + ' ' + path, user='hdfs')
def package(name):
import params
|
2be69ba584b76134fc055ea17b476ce32ce5bf1e
|
haas/drivers/__init__.py
|
haas/drivers/__init__.py
|
"""Network switch drivers for the HaaS.
This package provides HaaS drivers for various network switches. The
functions in the top-level module should not be used; they only exist
as a place to document the interface shared by all of the drivers.
Port IDs and network IDs should both be strings. The content of them will be
driver-specific.
"""
def apply_networking(net_map):
"""Takes in a dictionary, mapping port IDs to network IDs.
For each key-value pair (port, network) in the dictionary, set that port
to access that network. If network is None, set it to access nothing.
"""
def get_new_network_id(db):
"""Gets a new network ID, valid for this driver. Returns 'None' if there
are no more possible IDs available. Pass in the database connection, to
make the allocation part of the current transaction.
"""
def free_network_id(db, net_id):
"""Marks a network ID as unused, so that it can be re-used for a new
network. Can be a no-op on some drivers. Pass in the database
connection, to make the freeing part of the current transaction.
"""
def init_db():
"""Initializes any database tables and/or objects that the driver needs to
have to function correctly.
"""
|
"""Network switch drivers for the HaaS.
This package provides HaaS drivers for various network switches. The
functions in the top-level module should not be used; they only exist
as a place to document the interface shared by all of the drivers.
Port IDs and network IDs should both be strings. The content of them will be
driver-specific.
Note that get_new_network_id and free_network_id both accept a database
connection. They should not commit any changes---this way, if there is a
crash between the function returning, and the network actually being assigned
or removed from a network object, the entire transaction is cancelled.
Stateless drivers such as 'null' don't need to worry about this. Drivers
whose state is in the database, such as 'dell', require this. Drivers with
external state may need to do some difficult work to make this work.
"""
def apply_networking(net_map):
"""Takes in a dictionary, mapping port IDs to network IDs.
For each key-value pair (port, network) in the dictionary, set that port
to access that network. If network is None, set it to access nothing.
"""
def get_new_network_id(db):
"""Gets a new network ID, valid for this driver. Returns 'None' if there
are no more possible IDs available. Pass in the database connection, to
make the allocation part of the current transaction.
"""
def free_network_id(db, net_id):
"""Marks a network ID as unused, so that it can be re-used for a new
network. Can be a no-op on some drivers. Pass in the database
connection, to make the freeing part of the current transaction.
"""
def init_db():
"""Initializes any database tables and/or objects that the driver needs to
have to function correctly.
"""
|
Document reason for previous change
|
Document reason for previous change
|
Python
|
apache-2.0
|
meng-sun/hil,henn/haas,kylehogan/hil,henn/hil,henn/hil_sahil,henn/hil,kylehogan/hil,SahilTikale/haas,lokI8/haas,CCI-MOC/haas,meng-sun/hil,apoorvemohan/haas,kylehogan/haas,SahilTikale/switchHaaS,apoorvemohan/haas,henn/hil_sahil
|
---
+++
@@ -6,6 +6,15 @@
Port IDs and network IDs should both be strings. The content of them will be
driver-specific.
+
+Note that get_new_network_id and free_network_id both accept a database
+connection. They should not commit any changes---this way, if there is a
+crash between the function returning, and the network actually being assigned
+or removed from a network object, the entire transaction is cancelled.
+
+Stateless drivers such as 'null' don't need to worry about this. Drivers
+whose state is in the database, such as 'dell', require this. Drivers with
+external state may need to do some difficult work to make this work.
"""
|
2781d26ecd6440a97e168f3b6a51c96eae25c004
|
examples/guv_simple_http_response.py
|
examples/guv_simple_http_response.py
|
import guv
guv.monkey_patch()
import guv.server
import logging
import time
from util import create_example
import logger
if not hasattr(time, 'perf_counter'):
time.perf_counter = time.clock
logger.configure()
log = logging.getLogger()
response_times = []
def get_avg_time():
global response_times
times = response_times[-1000:]
avg = sum(times) / len(times)
if len(response_times) > 5000:
response_times = times
return avg
def handle(sock, addr):
# client connected
start_time = time.perf_counter()
sock.sendall(create_example())
sock.close()
total_time = time.perf_counter() - start_time
response_times.append(total_time)
if __name__ == '__main__':
pool = guv.GreenPool()
try:
log.debug('Start')
server_sock = guv.listen(('0.0.0.0', 8001))
server = guv.server.Server(server_sock, handle, pool, 'spawn_n')
server.start()
except (SystemExit, KeyboardInterrupt):
log.debug('average response time: {}'.format(get_avg_time()))
log.debug('Bye!')
|
# FIXME: pyuv_cffi needs to build the library BEFORE the standard library is patched
import pyuv_cffi
print('pyuv_cffi imported', pyuv_cffi)
import guv
guv.monkey_patch()
import guv.server
import logging
import time
from util import create_example
import logger
if not hasattr(time, 'perf_counter'):
time.perf_counter = time.clock
logger.configure()
log = logging.getLogger()
response_times = []
def get_avg_time():
global response_times
times = response_times[-1000:]
avg = sum(times) / len(times)
if len(response_times) > 5000:
response_times = times
return avg
def handle(sock, addr):
# client connected
start_time = time.perf_counter()
sock.sendall(create_example())
sock.close()
total_time = time.perf_counter() - start_time
response_times.append(total_time)
if __name__ == '__main__':
pool = guv.GreenPool()
try:
log.debug('Start')
server_sock = guv.listen(('0.0.0.0', 8001))
server = guv.server.Server(server_sock, handle, pool, 'spawn_n')
server.start()
except (SystemExit, KeyboardInterrupt):
log.debug('average response time: {}'.format(get_avg_time()))
log.debug('Bye!')
|
Add temporary workaround for monkey-patching bug
|
Add temporary workaround for monkey-patching bug
pyuv_cffi needs to be imported BEFORE monkey-patching the standard library in
order to successfully build the shared library. Need to find a workaround for
this. Once the library is built, subsequent imports will work fine even after
monkey-patching.
|
Python
|
mit
|
veegee/guv,veegee/guv
|
---
+++
@@ -1,3 +1,7 @@
+# FIXME: pyuv_cffi needs to build the library BEFORE the standard library is patched
+import pyuv_cffi
+
+print('pyuv_cffi imported', pyuv_cffi)
import guv
guv.monkey_patch()
|
82ba04d609c80fd2bf8034cf38654d10bb72aca5
|
src/app/actions/psmtable/filter_confidence.py
|
src/app/actions/psmtable/filter_confidence.py
|
from app.readers import tsv as tsvreader
def filter_psms(psms, confkey, conflvl, lower_is_better):
for psm in psms:
if passes_filter(psm, conflvl, confkey, lower_is_better):
yield psm
def passes_filter(psm, threshold, confkey, lower_is_better):
if psm[confkey] in ['NA', '', None, False]:
return False
lower = float(psm[confkey]) < float(threshold)
return lower == lower_is_better
|
from app.readers import tsv as tsvreader
def filter_psms(psms, confkey, conflvl, lower_is_better):
for psm in psms:
if passes_filter(psm, conflvl, confkey, lower_is_better):
yield psm
def passes_filter(psm, threshold, confkey, lower_is_better):
try:
confval = float(psm[confkey])
except (TypeError, ValueError):
return False
else:
lower = confval < float(threshold)
return lower == lower_is_better
|
Fix confidence filtering removed confidence=0 (False) items
|
Fix confidence filtering removed confidence=0 (False) items
|
Python
|
mit
|
glormph/msstitch
|
---
+++
@@ -8,7 +8,10 @@
def passes_filter(psm, threshold, confkey, lower_is_better):
- if psm[confkey] in ['NA', '', None, False]:
+ try:
+ confval = float(psm[confkey])
+ except (TypeError, ValueError):
return False
- lower = float(psm[confkey]) < float(threshold)
- return lower == lower_is_better
+ else:
+ lower = confval < float(threshold)
+ return lower == lower_is_better
|
6a754b4a52619f84346a1cc89148884cefb3bc78
|
motobot/irc_level.py
|
motobot/irc_level.py
|
class IRCLevel:
""" Enum class (Not really) for userlevels. """
user = 0
voice = 1
hop = 2
op = 3
aop = 4
sop = 5
def get_userlevels(nick):
""" Return the userlevels in a list from a nick. """
mapping = {
'+': IRCLevel.voice,
'%': IRCLevel.hop,
'@': IRCLevel.op,
'&': IRCLevel.aop,
'~': IRCLevel.sop
}
levels = [0]
for c in nick:
level = mapping.get(c, IRCLevel.user)
if level > 0:
levels.append(level)
return levels
|
class IRCLevel:
""" Enum class (Not really) for userlevels. """
user = 0
vop = 1
hop = 2
aop = 3
sop = 4
owner = 5
master = 6
|
Update IRCLevel and remove get_userlevels
|
Update IRCLevel and remove get_userlevels
|
Python
|
mit
|
Motoko11/MotoBot
|
---
+++
@@ -1,26 +1,9 @@
class IRCLevel:
""" Enum class (Not really) for userlevels. """
user = 0
- voice = 1
+ vop = 1
hop = 2
- op = 3
- aop = 4
- sop = 5
-
-
-def get_userlevels(nick):
- """ Return the userlevels in a list from a nick. """
- mapping = {
- '+': IRCLevel.voice,
- '%': IRCLevel.hop,
- '@': IRCLevel.op,
- '&': IRCLevel.aop,
- '~': IRCLevel.sop
- }
- levels = [0]
-
- for c in nick:
- level = mapping.get(c, IRCLevel.user)
- if level > 0:
- levels.append(level)
- return levels
+ aop = 3
+ sop = 4
+ owner = 5
+ master = 6
|
ac725f0d96cfe6ef989d3377e5e7ed9e339fe7e5
|
djangoautoconf/auth/ldap_backend_wrapper.py
|
djangoautoconf/auth/ldap_backend_wrapper.py
|
from django_auth_ldap.backend import LDAPBackend
class LDAPBackendWrapper(LDAPBackend):
# def authenticate(self, identification, password, **kwargs):
# return super(LDAPBackendWrapper, self).authenticate(identification, password, **kwargs)
def authenticate(self, **kwargs):
if "username" in kwargs:
username = kwargs["username"]
del kwargs["username"]
elif "identification" in kwargs:
username = kwargs["identification"]
del kwargs["identification"]
password = kwargs["password"]
del kwargs["password"]
return super(LDAPBackendWrapper, self).authenticate(username, password, **kwargs)
# return None
|
from django_auth_ldap.backend import LDAPBackend
class LDAPBackendWrapper(LDAPBackend):
# def authenticate(self, identification, password, **kwargs):
# return super(LDAPBackendWrapper, self).authenticate(identification, password, **kwargs)
def authenticate(self, **kwargs):
if "username" in kwargs:
username = kwargs["username"]
del kwargs["username"]
elif "identification" in kwargs:
username = kwargs["identification"]
del kwargs["identification"]
password = kwargs["password"]
del kwargs["password"]
return super(LDAPBackendWrapper, self).authenticate(username=username, password=password, **kwargs)
# return None
|
Update codes for ldap wrapper so the username and password are passed to authenticate correctly.
|
Update codes for ldap wrapper so the username and password are passed to authenticate correctly.
|
Python
|
bsd-3-clause
|
weijia/djangoautoconf,weijia/djangoautoconf
|
---
+++
@@ -11,8 +11,7 @@
elif "identification" in kwargs:
username = kwargs["identification"]
del kwargs["identification"]
-
password = kwargs["password"]
del kwargs["password"]
- return super(LDAPBackendWrapper, self).authenticate(username, password, **kwargs)
+ return super(LDAPBackendWrapper, self).authenticate(username=username, password=password, **kwargs)
# return None
|
bc2246e8efa3a8d196c95ceb6d028f3b655b70c5
|
hooks/pre_gen_project.py
|
hooks/pre_gen_project.py
|
import re
MODULE_REGEX = r"^[_a-zA-Z][_a-zA-Z0-9]*$"
ENVIRON_REGEX = r"^[_a-zA-Z][_a-zA-Z0-9]*$"
PYTHONVERSION_REGEX = r"^(3)\.[6-9]$"
module_name = "{{ cookiecutter.project_slug}}"
if not re.match(MODULE_REGEX, module_name):
raise ValueError(
f"""
ERROR: The project slug ({module_name}) is not a valid Python module name.
Please do not use anything other than letters, numbers and '_',
and do not start with a number.
"""
)
environment_name = "{{ cookiecutter.create_conda_environment_with_name }}"
if not re.match(ENVIRON_REGEX, environment_name):
raise ValueError(
f"""
ERROR: The project slug ({environment_name}) is not a valid Python module name.
Please do not use anything other than letters, numbers and '_',
and do not start with a number.
"""
)
python_version = "{{ cookiecutter.python_version }}"
if not re.match(PYTHONVERSION_REGEX, python_version):
raise ValueError(
f"""
ERROR: The python version must be >= 3.6
"""
)
|
import re
MODULE_REGEX = r"^[-_a-zA-Z0-9]*$"
ENVIRON_REGEX = r"^[-_a-zA-Z0-9]*$"
PYTHONVERSION_REGEX = r"^(3)\.[6-9]$"
module_name = "{{ cookiecutter.project_slug}}"
if not re.match(MODULE_REGEX, module_name):
raise ValueError(
f"""
ERROR: The project slug ({module_name}) is not a valid name.
Please do not use anything other than letters, numbers, underscores '_',
and minus signs '-'.
"""
)
environment_name = "{{ cookiecutter.create_conda_environment_with_name }}"
if not re.match(ENVIRON_REGEX, environment_name):
raise ValueError(
f"""
ERROR: The project slug ({module_name}) is not a valid name.
Please do not use anything other than letters, numbers, underscores '_',
and minus signs '-'.
"""
)
python_version = "{{ cookiecutter.python_version }}"
if not re.match(PYTHONVERSION_REGEX, python_version):
raise ValueError(
f"""
ERROR: The python version must be >= 3.6
"""
)
|
Allow for minus signs in project slug.
|
Allow for minus signs in project slug.
|
Python
|
bsd-3-clause
|
hmgaudecker/econ-project-templates,hmgaudecker/econ-project-templates,hmgaudecker/econ-project-templates
|
---
+++
@@ -1,8 +1,8 @@
import re
-MODULE_REGEX = r"^[_a-zA-Z][_a-zA-Z0-9]*$"
-ENVIRON_REGEX = r"^[_a-zA-Z][_a-zA-Z0-9]*$"
+MODULE_REGEX = r"^[-_a-zA-Z0-9]*$"
+ENVIRON_REGEX = r"^[-_a-zA-Z0-9]*$"
PYTHONVERSION_REGEX = r"^(3)\.[6-9]$"
module_name = "{{ cookiecutter.project_slug}}"
@@ -11,10 +11,10 @@
raise ValueError(
f"""
-ERROR: The project slug ({module_name}) is not a valid Python module name.
+ERROR: The project slug ({module_name}) is not a valid name.
-Please do not use anything other than letters, numbers and '_',
-and do not start with a number.
+Please do not use anything other than letters, numbers, underscores '_',
+and minus signs '-'.
"""
)
@@ -25,12 +25,12 @@
raise ValueError(
f"""
- ERROR: The project slug ({environment_name}) is not a valid Python module name.
+ERROR: The project slug ({module_name}) is not a valid name.
- Please do not use anything other than letters, numbers and '_',
- and do not start with a number.
+Please do not use anything other than letters, numbers, underscores '_',
+and minus signs '-'.
- """
+"""
)
python_version = "{{ cookiecutter.python_version }}"
|
4c4bfbce3658fdac1e774a9aa2037fb1c466e21d
|
features/support/splinter_client.py
|
features/support/splinter_client.py
|
from pymongo import MongoClient
from splinter import Browser
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
from pymongo import MongoClient
from splinter import Browser
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=3)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
Decrease splinter timeout to 3 seconds
|
Decrease splinter timeout to 3 seconds
@alexmuller
@maxfliri
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
---
+++
@@ -14,7 +14,7 @@
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
- self.browser = Browser('phantomjs', wait_time=15)
+ self.browser = Browser('phantomjs', wait_time=3)
def after_scenario(self):
self.browser.quit()
|
7e44e92e574efe110546a9d3a5e4807fa74fec6e
|
sympy/interactive/ipythonprinting.py
|
sympy/interactive/ipythonprinting.py
|
"""
A print function that pretty prints SymPy objects.
:moduleauthor: Brian Granger
Usage
=====
To use this extension, execute:
%load_ext sympy.interactive.ipythonprinting
Once the extension is loaded, SymPy Basic objects are automatically
pretty-printed in the terminal and rendered in LaTeX in the Qt console and
notebook.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import IPython
from sympy.interactive.printing import init_printing
#-----------------------------------------------------------------------------
# Definitions of special display functions for use with IPython
#-----------------------------------------------------------------------------
_loaded = False
def load_ipython_extension(ip):
"""Load the extension in IPython."""
global _loaded
# Use extension manager to track loaded status if available
# This is currently in IPython 0.14.dev
if hasattr(ip.extension_manager, 'loaded'):
loaded = 'sympy.interactive.ipythonprinting' not in ip.extension_manager.loaded
else:
loaded = _loaded
if not loaded:
if isinstance(ip, IPython.frontend.terminal.interactiveshell.TerminalInteractiveShell):
init_printing(ip=ip)
else:
init_printing(use_unicode=True, use_latex=True, ip=ip)
_loaded = True
|
"""
A print function that pretty prints SymPy objects.
:moduleauthor: Brian Granger
Usage
=====
To use this extension, execute:
%load_ext sympy.interactive.ipythonprinting
Once the extension is loaded, SymPy Basic objects are automatically
pretty-printed in the terminal and rendered in LaTeX in the Qt console and
notebook.
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2008 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from sympy.interactive.printing import init_printing
#-----------------------------------------------------------------------------
# Definitions of special display functions for use with IPython
#-----------------------------------------------------------------------------
_loaded = False
def load_ipython_extension(ip):
"""Load the extension in IPython."""
import IPython
global _loaded
# Use extension manager to track loaded status if available
# This is currently in IPython 0.14.dev
if hasattr(ip.extension_manager, 'loaded'):
loaded = 'sympy.interactive.ipythonprinting' not in ip.extension_manager.loaded
else:
loaded = _loaded
if not loaded:
if isinstance(ip, IPython.frontend.terminal.interactiveshell.TerminalInteractiveShell):
init_printing(ip=ip)
else:
init_printing(use_unicode=True, use_latex=True, ip=ip)
_loaded = True
|
Fix testing error when IPython not installed
|
Fix testing error when IPython not installed
|
Python
|
bsd-3-clause
|
moble/sympy,jaimahajan1997/sympy,pbrady/sympy,yukoba/sympy,cccfran/sympy,asm666/sympy,kaushik94/sympy,jbbskinny/sympy,garvitr/sympy,lindsayad/sympy,dqnykamp/sympy,oliverlee/sympy,abloomston/sympy,dqnykamp/sympy,grevutiu-gabriel/sympy,hrashk/sympy,maniteja123/sympy,liangjiaxing/sympy,rahuldan/sympy,Designist/sympy,debugger22/sympy,Titan-C/sympy,atsao72/sympy,mafiya69/sympy,jamesblunt/sympy,sahmed95/sympy,postvakje/sympy,oliverlee/sympy,beni55/sympy,sunny94/temp,MridulS/sympy,garvitr/sympy,Davidjohnwilson/sympy,abloomston/sympy,lidavidm/sympy,drufat/sympy,emon10005/sympy,sahilshekhawat/sympy,drufat/sympy,garvitr/sympy,jerli/sympy,dqnykamp/sympy,sampadsaha5/sympy,jamesblunt/sympy,kevalds51/sympy,Shaswat27/sympy,cccfran/sympy,hrashk/sympy,Shaswat27/sympy,souravsingh/sympy,meghana1995/sympy,VaibhavAgarwalVA/sympy,rahuldan/sympy,Titan-C/sympy,abhiii5459/sympy,maniteja123/sympy,iamutkarshtiwari/sympy,yashsharan/sympy,iamutkarshtiwari/sympy,oliverlee/sympy,MridulS/sympy,sahilshekhawat/sympy,wyom/sympy,AkademieOlympia/sympy,cswiercz/sympy,Mitchkoens/sympy,abhiii5459/sympy,grevutiu-gabriel/sympy,pandeyadarsh/sympy,shikil/sympy,asm666/sympy,Mitchkoens/sympy,cccfran/sympy,wanglongqi/sympy,ahhda/sympy,toolforger/sympy,diofant/diofant,Designist/sympy,shipci/sympy,vipulroxx/sympy,kmacinnis/sympy,Vishluck/sympy,wanglongqi/sympy,asm666/sympy,lindsayad/sympy,jbbskinny/sympy,sahmed95/sympy,cswiercz/sympy,liangjiaxing/sympy,postvakje/sympy,kumarkrishna/sympy,skidzo/sympy,bukzor/sympy,rahuldan/sympy,skirpichev/omg,chaffra/sympy,Curious72/sympy,emon10005/sympy,Shaswat27/sympy,kevalds51/sympy,atsao72/sympy,meghana1995/sympy,wyom/sympy,moble/sympy,hrashk/sympy,Mitchkoens/sympy,sampadsaha5/sympy,pbrady/sympy,ahhda/sympy,madan96/sympy,ga7g08/sympy,kaushik94/sympy,sahmed95/sympy,vipulroxx/sympy,kumarkrishna/sympy,atreyv/sympy,sunny94/temp,saurabhjn76/sympy,yukoba/sympy,AunShiLord/sympy,kaushik94/sympy,atreyv/sympy,lidavidm/sympy,ChristinaZografou/sympy,VaibhavAgarwalVA/sympy,ga7g08/sympy,farh
aanbukhsh/sympy,shipci/sympy,vipulroxx/sympy,grevutiu-gabriel/sympy,bukzor/sympy,jerli/sympy,Arafatk/sympy,Sumith1896/sympy,iamutkarshtiwari/sympy,lindsayad/sympy,mafiya69/sympy,Gadal/sympy,Arafatk/sympy,lidavidm/sympy,ChristinaZografou/sympy,mafiya69/sympy,hargup/sympy,liangjiaxing/sympy,beni55/sympy,MridulS/sympy,madan96/sympy,atreyv/sympy,saurabhjn76/sympy,Arafatk/sympy,yashsharan/sympy,Sumith1896/sympy,MechCoder/sympy,jbbskinny/sympy,AkademieOlympia/sympy,Curious72/sympy,chaffra/sympy,kmacinnis/sympy,meghana1995/sympy,mcdaniel67/sympy,Titan-C/sympy,aktech/sympy,shipci/sympy,Davidjohnwilson/sympy,chaffra/sympy,drufat/sympy,sunny94/temp,shikil/sympy,skidzo/sympy,pandeyadarsh/sympy,madan96/sympy,skidzo/sympy,Vishluck/sympy,debugger22/sympy,sampadsaha5/sympy,mcdaniel67/sympy,yukoba/sympy,emon10005/sympy,toolforger/sympy,moble/sympy,mcdaniel67/sympy,jaimahajan1997/sympy,AunShiLord/sympy,wyom/sympy,farhaanbukhsh/sympy,AkademieOlympia/sympy,jerli/sympy,debugger22/sympy,jaimahajan1997/sympy,aktech/sympy,Curious72/sympy,MechCoder/sympy,wanglongqi/sympy,amitjamadagni/sympy,kaichogami/sympy,aktech/sympy,ChristinaZografou/sympy,hargup/sympy,atsao72/sympy,kumarkrishna/sympy,cswiercz/sympy,postvakje/sympy,beni55/sympy,pbrady/sympy,shikil/sympy,kaichogami/sympy,pandeyadarsh/sympy,bukzor/sympy,abhiii5459/sympy,saurabhjn76/sympy,abloomston/sympy,Davidjohnwilson/sympy,MechCoder/sympy,amitjamadagni/sympy,Gadal/sympy,toolforger/sympy,AunShiLord/sympy,souravsingh/sympy,VaibhavAgarwalVA/sympy,souravsingh/sympy,sahilshekhawat/sympy,yashsharan/sympy,farhaanbukhsh/sympy,ahhda/sympy,kevalds51/sympy,Sumith1896/sympy,kaichogami/sympy,jamesblunt/sympy,Designist/sympy,kmacinnis/sympy,ga7g08/sympy,Gadal/sympy,maniteja123/sympy,Vishluck/sympy,hargup/sympy
|
---
+++
@@ -26,7 +26,6 @@
# Imports
#-----------------------------------------------------------------------------
-import IPython
from sympy.interactive.printing import init_printing
#-----------------------------------------------------------------------------
@@ -37,6 +36,8 @@
def load_ipython_extension(ip):
"""Load the extension in IPython."""
+ import IPython
+
global _loaded
# Use extension manager to track loaded status if available
# This is currently in IPython 0.14.dev
|
5c928ea4c3f45cb32f7209a2c63a1c010d5860e0
|
app/models.py
|
app/models.py
|
from app import db
class Base(db.Model):
__abstract__ = True
id = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
def __repr__(self):
return '<Route <{0}-{1}-{2}>'.format(self.origin_point,
self.destination_point,
self.distance)
|
from app import db
class Base(db.Model):
__abstract__ = True
pk = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
class Route(Base):
__tablename__ = 'routes'
origin_point = db.Column(db.String(128), nullable=False)
destination_point = db.Column(db.String(128), nullable=False)
distance = db.Column(db.Integer, nullable=False)
def __repr__(self):
return '<Route <{0}-{1}-{2}>'.format(self.origin_point,
self.destination_point,
self.distance)
|
Change field "id" to "pk" in order to not conflict with Python "id" keyword
|
Change field "id" to "pk" in order to not conflict with Python "id" keyword
|
Python
|
mit
|
mdsrosa/routes_api_python
|
---
+++
@@ -5,7 +5,7 @@
__abstract__ = True
- id = db.Column(db.Integer, primary_key=True)
+ pk = db.Column(db.Integer, primary_key=True)
created_at = db.Column(db.DateTime, default=db.func.current_timestamp())
updated_at = db.Column(db.DateTime, default=db.func.current_timestamp())
|
1b093c116ff7fa926caa166c835fb3add4bf0036
|
scale/util/dcos.py
|
scale/util/dcos.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
import json
import requests
from django.conf import settings
from mesoshttp.acs import DCOSServiceAuth
DCOS_AUTH = None
DCOS_VERIFY = True
if settings.SERVICE_SECRET:
# We are in Enterprise mode and using service account
DCOS_AUTH = DCOSServiceAuth((json.loads(settings.SERVICE_SECRET)))
cert_file = 'dcos-ca.crt'
response = requests.get('https://leader.mesos/ca/' + cert_file, verify=False)
if response.status_code == 200:
with open(cert_file, 'w') as cert:
cert.write(response.text)
DCOS_VERIFY = cert_file
def make_dcos_request(host_address, relative_url, params=None):
"""Makes a requests that is capable of traversing DCOS EE Strict boundary
:param master: The address for the Mesos master
:type master: `util.host.HostAddress`
:param relative_url: URL path relative to the base address
:type relative_url: basestring
:param params: The query parameters for request
:type params: dict
:returns: The request response object
:rtype: :class:`requests.Response`
"""
return requests.get('%s://%s:%s%s' % (host_address.protocol,
host_address.hostname,
host_address.port,
relative_url),
param=params,
auth=DCOS_AUTH,
verify=DCOS_VERIFY)
|
from __future__ import absolute_import
from __future__ import unicode_literals
import json
import requests
from django.conf import settings
from mesoshttp.acs import DCOSServiceAuth
DCOS_AUTH = None
DCOS_VERIFY = True
if settings.SERVICE_SECRET:
# We are in Enterprise mode and using service account
DCOS_AUTH = DCOSServiceAuth((json.loads(settings.SERVICE_SECRET)))
cert_file = 'dcos-ca.crt'
response = requests.get('https://leader.mesos/ca/' + cert_file, verify=False)
if response.status_code == 200:
with open(cert_file, 'w') as cert:
cert.write(response.text)
DCOS_VERIFY = cert_file
def make_dcos_request(host_address, relative_url, params=None):
"""Makes a requests that is capable of traversing DCOS EE Strict boundary
:param master: The address for the Mesos master
:type master: `util.host.HostAddress`
:param relative_url: URL path relative to the base address
:type relative_url: basestring
:param params: The query parameters for request
:type params: dict
:returns: The request response object
:rtype: :class:`requests.Response`
"""
return requests.get('%s://%s:%s%s' % (host_address.protocol,
host_address.hostname,
host_address.port,
relative_url),
params=params,
auth=DCOS_AUTH,
verify=DCOS_VERIFY)
|
Fix typo in requests helper
|
Fix typo in requests helper
|
Python
|
apache-2.0
|
ngageoint/scale,ngageoint/scale,ngageoint/scale,ngageoint/scale
|
---
+++
@@ -39,6 +39,6 @@
host_address.hostname,
host_address.port,
relative_url),
- param=params,
+ params=params,
auth=DCOS_AUTH,
verify=DCOS_VERIFY)
|
3c13870ffd25a31006cadbf9a9793566cffaecb6
|
win-installer/gaphor-script.py
|
win-installer/gaphor-script.py
|
if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.core.modeling import ElementFactory
from gaphor.plugins.console import ConsoleWindow
from gaphor.plugins.diagramexport import DiagramExport
from gaphor.plugins.xmiexport import XMIExport
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.services.copyservice import CopyService
from gaphor.core.eventmanager import EventManager
from gaphor.services.helpservice import HelpService
from gaphor.services.properties import Properties
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.services.session import Session
from gaphor.services.undomanager import UndoManager
from gaphor.ui.elementeditor import ElementEditor
from gaphor.ui.appfilemanager import AppFileManager
from gaphor.ui.filemanager import FileManager
from gaphor.ui.mainwindow import Diagrams
from gaphor.ui.mainwindow import MainWindow
from gaphor.ui.menufragment import MenuFragment
from gaphor.ui.namespace import Namespace
from gaphor.ui.preferences import Preferences
from gaphor.ui.recentfiles import RecentFiles
from gaphor.ui.toolbox import Toolbox
from gaphor.ui import main
import sys
main(sys.argv)
|
if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.core.modeling import ElementFactory
from gaphor.plugins.console import ConsoleWindow
from gaphor.plugins.diagramexport import DiagramExport
from gaphor.plugins.xmiexport import XMIExport
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.services.copyservice import CopyService
from gaphor.core.eventmanager import EventManager
from gaphor.services.helpservice import HelpService
from gaphor.services.properties import Properties
from gaphor.UML.sanitizerservice import SanitizerService
from gaphor.services.session import Session
from gaphor.services.undomanager import UndoManager
from gaphor.ui.elementeditor import ElementEditor
from gaphor.ui.appfilemanager import AppFileManager
from gaphor.ui.filemanager import FileManager
from gaphor.ui.mainwindow import Diagrams
from gaphor.ui.mainwindow import MainWindow
from gaphor.ui.menufragment import MenuFragment
from gaphor.ui.namespace import Namespace
from gaphor.ui.preferences import Preferences
from gaphor.ui.recentfiles import RecentFiles
from gaphor.ui.toolbox import Toolbox
from gaphor.ui import main
import sys
main(sys.argv)
|
Fix sanitizer service reference for windows
|
Fix sanitizer service reference for windows
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
---
+++
@@ -10,7 +10,7 @@
from gaphor.core.eventmanager import EventManager
from gaphor.services.helpservice import HelpService
from gaphor.services.properties import Properties
- from gaphor.services.sanitizerservice import SanitizerService
+ from gaphor.UML.sanitizerservice import SanitizerService
from gaphor.services.session import Session
from gaphor.services.undomanager import UndoManager
from gaphor.ui.elementeditor import ElementEditor
|
3b21be6f0711163fdb6f1cf99514fae04f395b62
|
romanesco/plugins/swift/tests/swift_test.py
|
romanesco/plugins/swift/tests/swift_test.py
|
import romanesco
import unittest
class TestSwiftMode(unittest.TestCase):
def testSwiftMode(self):
task = {
'mode': 'swift',
'script': """
type file;
app (file out) echo_app (string s)
{
echo s stdout=filename(out);
}
string a = arg("a", "10");
file out <"out.csv">;
out = echo_app(strcat("a,b,c\\n", a, ",2,3"));
""",
'inputs': [{
'id': 'a',
'format': 'json',
'type': 'number'
}],
'swift_args': ['-a=$input{a}'],
'outputs': [{
'id': 'out.csv',
'type': 'table',
'format': 'csv'
}]
}
inputs = {
'a': {
'format': 'number',
'data': 5
}
}
out = romanesco.run(task, inputs=inputs)
self.assertEqual(out, {
'out.csv': {
'data': 'a,b,c\n5,2,3\n',
'format': 'csv'
}
})
|
import os
import romanesco
import shutil
import unittest
def setUpModule():
global _tmp
global _cwd
_cwd = os.getcwd()
_tmp = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'tmp', 'swift')
if not os.path.isdir(_tmp):
os.makedirs(_tmp)
os.chdir(_tmp)
def tearDownModule():
os.chdir(_cwd)
if os.path.isdir(_tmp):
shutil.rmtree(_tmp)
class TestSwiftMode(unittest.TestCase):
def testSwiftMode(self):
task = {
'mode': 'swift',
'script': """
type file;
app (file out) echo_app (string s)
{
echo s stdout=filename(out);
}
string a = arg("a", "10");
file out <"out.csv">;
out = echo_app(strcat("a,b,c\\n", a, ",2,3"));
""",
'inputs': [{
'id': 'a',
'format': 'json',
'type': 'number'
}],
'swift_args': ['-a=$input{a}'],
'outputs': [{
'id': 'out.csv',
'type': 'table',
'format': 'csv'
}]
}
inputs = {
'a': {
'format': 'number',
'data': 5
}
}
out = romanesco.run(task, inputs=inputs)
self.assertEqual(out, {
'out.csv': {
'data': 'a,b,c\n5,2,3\n',
'format': 'csv'
}
})
|
Clean up after swift run
|
Clean up after swift run
|
Python
|
apache-2.0
|
girder/girder_worker,Kitware/romanesco,Kitware/romanesco,Kitware/romanesco,girder/girder_worker,Kitware/romanesco,girder/girder_worker
|
---
+++
@@ -1,5 +1,24 @@
+import os
import romanesco
+import shutil
import unittest
+
+
+def setUpModule():
+ global _tmp
+ global _cwd
+ _cwd = os.getcwd()
+ _tmp = os.path.join(
+ os.path.dirname(os.path.abspath(__file__)), 'tmp', 'swift')
+ if not os.path.isdir(_tmp):
+ os.makedirs(_tmp)
+ os.chdir(_tmp)
+
+
+def tearDownModule():
+ os.chdir(_cwd)
+ if os.path.isdir(_tmp):
+ shutil.rmtree(_tmp)
class TestSwiftMode(unittest.TestCase):
|
78deb7cc734bd5eaca9678bd61fa164699f21121
|
tohu/cloning.py
|
tohu/cloning.py
|
__all__ = ['CloneableMeta']
def attach_new_init_method(cls):
"""
Replace the existing cls.__init__() method with a new one which
also initialises the _clones attribute to an empty list.
"""
orig_init = cls.__init__
def new_init(self, *args, **kwargs):
orig_init(self, *args, **kwargs)
self._clones = []
cls.__init__ = new_init
class CloneableMeta(type):
def __new__(metacls, cg_name, bases, clsdict):
new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict)
attach_new_init_method(new_cls)
return new_cls
|
__all__ = ['CloneableMeta']
def attach_new_init_method(cls):
"""
Replace the existing cls.__init__() method with a new one which
also initialises the _clones attribute to an empty list.
"""
orig_init = cls.__init__
def new_init(self, *args, **kwargs):
orig_init(self, *args, **kwargs)
self._clones = []
cls.__init__ = new_init
class CloneableMeta(type):
def __new__(metacls, cg_name, bases, clsdict):
new_cls = super(CloneableMeta, metacls).__new__(metacls, cg_name, bases, clsdict)
attach_new_init_method(new_cls)
return new_cls
|
Add newline at end of file
|
Add newline at end of file
|
Python
|
mit
|
maxalbert/tohu
| |
f2fd224b5e3c8cb4a919e082c47c603d4469a564
|
jacquard/buckets/tests/test_bucket.py
|
jacquard/buckets/tests/test_bucket.py
|
import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
assert not bucket.needs_constraints()
|
import pytest
from jacquard.odm import Session
from jacquard.buckets import Bucket
from jacquard.buckets.constants import NUM_BUCKETS
@pytest.mark.parametrize('divisor', (
2,
3,
4,
5,
6,
10,
100,
))
def test_divisible(divisor):
assert NUM_BUCKETS % divisor == 0
def test_at_least_three_buckets_per_percent():
assert NUM_BUCKETS / 100 >= 3
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
# Force bucket to a string in order to reify the fields. This validates
# that the fields are accessible.
str(bucket)
|
Use an explicit test here
|
Use an explicit test here
|
Python
|
mit
|
prophile/jacquard,prophile/jacquard
|
---
+++
@@ -25,4 +25,6 @@
def test_can_get_empty_bucket_from_old_format():
session = Session({'buckets/1': []})
bucket = session.get(Bucket, 1)
- assert not bucket.needs_constraints()
+ # Force bucket to a string in order to reify the fields. This validates
+ # that the fields are accessible.
+ str(bucket)
|
7a0ed88e1775429ce283cc315cc05ea3dbde229f
|
tests/response_construction_tests.py
|
tests/response_construction_tests.py
|
from django.test.client import RequestFactory
from mock import Mock
from unittest2 import TestCase
from .helpers import RequestPatchMixin
from .test_views import TestProxy
class ResponseConstructionTest(TestCase, RequestPatchMixin):
def setUp(self):
self.proxy = TestProxy.as_view()
self.browser_request = RequestFactory().get('/')
self.proxy_stub = Mock(
content='upstream content', headers={
'Fake-Header': '123',
'Transfer-Encoding': 'foo'
}, status_code=201)
self.patch_request(self.proxy_stub)
self.response = self.proxy(self.browser_request)
class HttpProxyContentPassThrough(ResponseConstructionTest):
def test_creates_response_object_with_proxied_content(self):
self.assertEqual(
self.response.content.decode('utf-8'), 'upstream content')
def test_creates_response_object_with_proxied_status(self):
self.assertEqual(self.response.status_code, 201)
class HttpProxyHeaderPassThrough(ResponseConstructionTest):
def test_sets_upstream_headers_on_response_object(self):
self.assertEqual(self.response['Fake-Header'], '123')
def test_doesnt_set_ignored_upstream_headers_on_response_obj(self):
self.assertFalse(self.response.has_header('Transfer-Encoding'))
|
from django.test.client import RequestFactory
from mock import Mock
from unittest2 import TestCase
from .helpers import RequestPatchMixin
from .test_views import TestProxy
class ResponseConstructionTest(TestCase, RequestPatchMixin):
def get_request(self):
return RequestFactory().get('/')
def setUp(self):
self.proxy = TestProxy.as_view()
self.browser_request = self.get_request()
self.proxy_stub = Mock(
content='upstream content', headers={
'Fake-Header': '123',
'Transfer-Encoding': 'foo'
}, status_code=201)
self.patch_request(self.proxy_stub)
self.response = self.proxy(self.browser_request)
class HttpProxyContentPassThrough(ResponseConstructionTest):
def test_creates_response_object_with_proxied_content(self):
self.assertEqual(
self.response.content.decode('utf-8'), 'upstream content')
def test_creates_response_object_with_proxied_status(self):
self.assertEqual(self.response.status_code, 201)
class HttpProxyHeaderPassThrough(ResponseConstructionTest):
def test_sets_upstream_headers_on_response_object(self):
self.assertEqual(self.response['Fake-Header'], '123')
def test_doesnt_set_ignored_upstream_headers_on_response_obj(self):
self.assertFalse(self.response.has_header('Transfer-Encoding'))
class HttpProxyEmptyContentLengthHandling(ResponseConstructionTest):
def get_request(self):
request = RequestFactory().get('/')
request.META['CONTENT_LENGTH'] = ''
return request
def test_succeeds(self):
self.assertEqual(self.response.status_code, 201)
|
Add test coverage for 1.10 CONTENT_LENGTH hack
|
Add test coverage for 1.10 CONTENT_LENGTH hack
|
Python
|
mit
|
thomasw/djproxy
|
---
+++
@@ -7,9 +7,12 @@
class ResponseConstructionTest(TestCase, RequestPatchMixin):
+ def get_request(self):
+ return RequestFactory().get('/')
+
def setUp(self):
self.proxy = TestProxy.as_view()
- self.browser_request = RequestFactory().get('/')
+ self.browser_request = self.get_request()
self.proxy_stub = Mock(
content='upstream content', headers={
@@ -36,3 +39,14 @@
def test_doesnt_set_ignored_upstream_headers_on_response_obj(self):
self.assertFalse(self.response.has_header('Transfer-Encoding'))
+
+
+class HttpProxyEmptyContentLengthHandling(ResponseConstructionTest):
+ def get_request(self):
+ request = RequestFactory().get('/')
+ request.META['CONTENT_LENGTH'] = ''
+
+ return request
+
+ def test_succeeds(self):
+ self.assertEqual(self.response.status_code, 201)
|
83ca7677ac77d55f9ba978f2988b18faa9e74424
|
secondhand/urls.py
|
secondhand/urls.py
|
from django.conf.urls import patterns, include, url
from tastypie.api import Api
from tracker.api import UserResource, TaskResource, WorkSessionResource
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# tracker API.
v1_api = Api(api_name='v1')
v1_api.register(UserResource())
v1_api.register(TaskResource())
v1_api.register(WorkSessionResource())
urlpatterns = patterns('',
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(v1_api.urls())),
)
|
from django.conf.urls import patterns, include, url
from tastypie.api import Api
from tracker.api import UserResource, TaskResource, WorkSessionResource, \
RegistrationResource
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# tracker API.
v1_api = Api(api_name='v1')
v1_api.register(UserResource())
v1_api.register(RegistrationResource())
v1_api.register(TaskResource())
v1_api.register(WorkSessionResource())
urlpatterns = patterns('',
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
url(r'^api/', include(v1_api.urls)),
)
|
Fix minor issue, reorganize imports, and register the RegistrationResource with the API.
|
Fix minor issue, reorganize imports, and register the RegistrationResource with the API.
|
Python
|
mit
|
GeneralMaximus/secondhand
|
---
+++
@@ -1,6 +1,7 @@
from django.conf.urls import patterns, include, url
from tastypie.api import Api
-from tracker.api import UserResource, TaskResource, WorkSessionResource
+from tracker.api import UserResource, TaskResource, WorkSessionResource, \
+ RegistrationResource
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
@@ -9,6 +10,7 @@
# tracker API.
v1_api = Api(api_name='v1')
v1_api.register(UserResource())
+v1_api.register(RegistrationResource())
v1_api.register(TaskResource())
v1_api.register(WorkSessionResource())
@@ -18,5 +20,5 @@
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
- url(r'^api/', include(v1_api.urls())),
+ url(r'^api/', include(v1_api.urls)),
)
|
06f10e09f5b1c5766815b6e7eb219b4e33082709
|
check_urls.py
|
check_urls.py
|
#!/usr/bin/env python2.7
import re, sys, markdown, requests, bs4 as BeautifulSoup
reload(sys)
sys.setdefaultencoding('utf8')
def check_url(url):
try:
return bool(requests.head(url, allow_redirects=True))
except Exception as e:
print 'Error checking URL %s: %s' % (url, e)
return False
def retrieve_urls(filename):
with open(filename) as fd:
mdtext = fd.read()
html_text = markdown.markdown(mdtext)
soup = BeautifulSoup.BeautifulSoup(html_text, "html.parser")
return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
print 'checking URLs for %s' % (filename,)
ok = True
for url in retrieve_urls(filename):
r = "(?:http[s]?://[^)]+)"
u = re.findall(r, url)
if not u: continue
msg = 'Checking %s => ' % (u[0],)
if check_url(u[0]):
print msg, 'OK'
else:
print msg, 'FAILED'
ok = False
return ok
def main():
ok = True
for filename in sys.argv[1:]:
try:
ok &= check_urls(filename)
except IOError as e:
print e
ok = False
exit (0 if ok else 1)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python2.7
from __future__ import print_function
import re, sys, markdown, requests, bs4 as BeautifulSoup
try: # Python 2
reload
except NameError: # Python 3
from importlib import reload
reload(sys)
sys.setdefaultencoding('utf8')
def check_url(url):
try:
return bool(requests.head(url, allow_redirects=True))
except Exception as e:
print('Error checking URL %s: %s' % (url, e))
return False
def retrieve_urls(filename):
with open(filename) as fd:
mdtext = fd.read()
html_text = markdown.markdown(mdtext)
soup = BeautifulSoup.BeautifulSoup(html_text, "html.parser")
return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
print('checking URLs for %s' % filename)
ok = True
for url in retrieve_urls(filename):
r = "(?:http[s]?://[^)]+)"
u = re.findall(r, url)
if not u: continue
msg = 'Checking %s => ' % (u[0],)
if check_url(u[0]):
print(msg, 'OK')
else:
print(msg, 'FAILED')
ok = False
return ok
def main():
ok = True
for filename in sys.argv[1:]:
try:
ok &= check_urls(filename)
except IOError as e:
print(e)
ok = False
exit(0 if ok else 1)
if __name__ == '__main__':
main()
|
Add Python 3 compatibility and flake8 testing
|
Add Python 3 compatibility and flake8 testing
|
Python
|
unlicense
|
ligurio/free-software-testing-books
|
---
+++
@@ -1,6 +1,12 @@
#!/usr/bin/env python2.7
+from __future__ import print_function
import re, sys, markdown, requests, bs4 as BeautifulSoup
+
+try: # Python 2
+ reload
+except NameError: # Python 3
+ from importlib import reload
reload(sys)
sys.setdefaultencoding('utf8')
@@ -9,7 +15,7 @@
try:
return bool(requests.head(url, allow_redirects=True))
except Exception as e:
- print 'Error checking URL %s: %s' % (url, e)
+ print('Error checking URL %s: %s' % (url, e))
return False
def retrieve_urls(filename):
@@ -20,7 +26,7 @@
return [a['href'] for a in soup.findAll('a')]
def check_urls(filename):
- print 'checking URLs for %s' % (filename,)
+ print('checking URLs for %s' % filename)
ok = True
for url in retrieve_urls(filename):
r = "(?:http[s]?://[^)]+)"
@@ -28,9 +34,9 @@
if not u: continue
msg = 'Checking %s => ' % (u[0],)
if check_url(u[0]):
- print msg, 'OK'
+ print(msg, 'OK')
else:
- print msg, 'FAILED'
+ print(msg, 'FAILED')
ok = False
return ok
@@ -40,9 +46,9 @@
try:
ok &= check_urls(filename)
except IOError as e:
- print e
+ print(e)
ok = False
- exit (0 if ok else 1)
+ exit(0 if ok else 1)
if __name__ == '__main__':
main()
|
9d1dc2ef7db2f883e05286edd3865acfdadc19be
|
django-oracle-drcp/base.py
|
django-oracle-drcp/base.py
|
# pylint: disable=W0401
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.oracle.base import *
from django.db.backends.oracle.base import DatabaseWrapper as DjDatabaseWrapper
import cx_Oracle
class DatabaseWrapper(DjDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
default_pool = {
'min': 1,
'max': 2,
'increment': 1,
}
poolconfig = self.settings_dict.get('POOL', default_pool)
if set(pool_config.keys()) != {'min', 'max', 'increment'}:
raise ImproperlyConfigured('POOL database option requires \'min\', \'max\', and \'increment\'')
if not all(isinstance(val, int) for val in pool_config.values()):
raise ImproperlyConfigured('POOL database option values must be numeric')
self.pool = cx_Oracle.SessionPool(
user=self.settings_dict['USER'],
password=self.settings_dict['PASSWORD'],
dsn=self.settings_dict['NAME'], **poolconfig)
def get_new_connection(self, conn_params):
conn_params.update({
'pool': self.pool,
})
return super(DatabaseWrapper, self).get_new_connection(conn_params)
def _close(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.pool.release(self.connection)
|
# pylint: disable=W0401
from django.core.exceptions import ImproperlyConfigured
from django.db.backends.oracle.base import *
from django.db.backends.oracle.base import DatabaseWrapper as DjDatabaseWrapper
import cx_Oracle
class DatabaseWrapper(DjDatabaseWrapper):
def __init__(self, *args, **kwargs):
super(DatabaseWrapper, self).__init__(*args, **kwargs)
default_pool = {
'min': 1,
'max': 2,
'increment': 1,
}
pool_config = self.settings_dict.get('POOL', default_pool)
if set(pool_config.keys()) != {'min', 'max', 'increment'}:
raise ImproperlyConfigured('POOL database option requires \'min\', \'max\', and \'increment\'')
if not all(isinstance(val, int) for val in pool_config.values()):
raise ImproperlyConfigured('POOL database option values must be numeric')
self.pool = cx_Oracle.SessionPool(
user=self.settings_dict['USER'],
password=self.settings_dict['PASSWORD'],
dsn=self.settings_dict['NAME'], **pool_config)
def get_new_connection(self, conn_params):
conn_params.update({
'pool': self.pool,
})
return super(DatabaseWrapper, self).get_new_connection(conn_params)
def _close(self):
if self.connection is not None:
with self.wrap_database_errors:
return self.pool.release(self.connection)
|
Change variable name consistently to pool_config
|
Change variable name consistently to pool_config
|
Python
|
bsd-2-clause
|
JohnPapps/django-oracle-drcp
|
---
+++
@@ -15,7 +15,7 @@
'max': 2,
'increment': 1,
}
- poolconfig = self.settings_dict.get('POOL', default_pool)
+ pool_config = self.settings_dict.get('POOL', default_pool)
if set(pool_config.keys()) != {'min', 'max', 'increment'}:
raise ImproperlyConfigured('POOL database option requires \'min\', \'max\', and \'increment\'')
if not all(isinstance(val, int) for val in pool_config.values()):
@@ -23,7 +23,7 @@
self.pool = cx_Oracle.SessionPool(
user=self.settings_dict['USER'],
password=self.settings_dict['PASSWORD'],
- dsn=self.settings_dict['NAME'], **poolconfig)
+ dsn=self.settings_dict['NAME'], **pool_config)
def get_new_connection(self, conn_params):
conn_params.update({
|
c5ff897355fb7fce5022127bcae756e8c68dc864
|
data/views.py
|
data/views.py
|
import os
from django.shortcuts import render, redirect
from django.template import Context
from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
from chemtools.extractor import CORES, RGROUPS, ARYL
from data.models import JobTemplate
def frag_index(request):
xrnames = ["H", "Cl", "Br", "CN", "CCH", "OH",
"SH", "NH_2", "CH_3", "phenyl", "TMS", "OCH_3"]
arylnames = ["double bond", "triple bond", "phenyl",
"thiophene", "pyridine", "carbazole", "TZ", "EDOT"]
data = (
["Cores", CORES],
["X/R Groups", zip(RGROUPS, xrnames)],
["Aryl Groups", zip(ARYL, arylnames)],
)
c = Context({"usable_parts": data})
return render(request, "data/frag_index.html", c)
def get_frag(request, frag):
if frag in os.listdir("chemtools/data/"):
f = open("chemtools/data/" + frag, "r")
response = HttpResponse(FileWrapper(f), content_type="text/plain")
return response
else:
return redirect(frag_index)
def template_index(request):
c = Context({"templates": JobTemplate.objects.all()})
return render(request, "data/template_index.html", c)
|
import os
from django.shortcuts import render, redirect
from django.template import Context
from django.http import HttpResponse
from django.core.servers.basehttp import FileWrapper
from chemtools.extractor import CORES, RGROUPS, ARYL
from data.models import JobTemplate
def frag_index(request):
xrnames = ["H", "Cl", "Br", "CN", "CCH", "OH",
"SH", "NH_2", "CH_3", "phenyl", "TMS", "OCH_3", "F"]
arylnames = ["double bond", "triple bond", "phenyl", "thiophene",
"pyridine", "carbazole", "TZ", "EDOT", "DTF", "acetyl",
"furan", "pyrrole"]
data = (
["Cores", CORES],
["X/R Groups", zip(RGROUPS, xrnames)],
["Aryl Groups", zip(ARYL, arylnames)],
)
c = Context({"usable_parts": data})
return render(request, "data/frag_index.html", c)
def get_frag(request, frag):
if frag in os.listdir("chemtools/data/"):
f = open("chemtools/data/" + frag, "r")
response = HttpResponse(FileWrapper(f), content_type="text/plain")
return response
else:
return redirect(frag_index)
def template_index(request):
c = Context({"templates": JobTemplate.objects.all()})
return render(request, "data/template_index.html", c)
|
Add new fragments to data.frag_index
|
Add new fragments to data.frag_index
|
Python
|
mit
|
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
|
---
+++
@@ -10,9 +10,10 @@
def frag_index(request):
xrnames = ["H", "Cl", "Br", "CN", "CCH", "OH",
- "SH", "NH_2", "CH_3", "phenyl", "TMS", "OCH_3"]
- arylnames = ["double bond", "triple bond", "phenyl",
- "thiophene", "pyridine", "carbazole", "TZ", "EDOT"]
+ "SH", "NH_2", "CH_3", "phenyl", "TMS", "OCH_3", "F"]
+ arylnames = ["double bond", "triple bond", "phenyl", "thiophene",
+ "pyridine", "carbazole", "TZ", "EDOT", "DTF", "acetyl",
+ "furan", "pyrrole"]
data = (
["Cores", CORES],
["X/R Groups", zip(RGROUPS, xrnames)],
|
edec252d9a050ead0084280f9772f05a2a3d7608
|
preferences/forms.py
|
preferences/forms.py
|
from registration.forms import RegistrationFormUniqueEmail
class RegistrationUserForm(RegistrationFormUniqueEmail):
class Meta:
model = User
fields = ("email")
|
from django import forms
from registration.forms import RegistrationFormUniqueEmail
from preferences.models import Preferences
# from django.forms import ModelForm
# class RegistrationUserForm(RegistrationFormUniqueEmail):
# class Meta:
# model = User
# fields = ("email")
class PreferencesForm(forms.ModelForm):
class Meta:
model = Preferences
fields = ['representitive', 'senator', 'street_line1', 'street_line2',
'zipcode', 'city', 'state']
|
Add preferences form built off model
|
Add preferences form built off model
|
Python
|
mit
|
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
|
---
+++
@@ -1,7 +1,19 @@
+from django import forms
from registration.forms import RegistrationFormUniqueEmail
-class RegistrationUserForm(RegistrationFormUniqueEmail):
+from preferences.models import Preferences
+# from django.forms import ModelForm
+
+# class RegistrationUserForm(RegistrationFormUniqueEmail):
+
+# class Meta:
+# model = User
+# fields = ("email")
+
+
+class PreferencesForm(forms.ModelForm):
class Meta:
- model = User
- fields = ("email")
+ model = Preferences
+ fields = ['representitive', 'senator', 'street_line1', 'street_line2',
+ 'zipcode', 'city', 'state']
|
e6026134e02f516cc84e499494205efa0ad7441f
|
tests/test_autoconfig.py
|
tests/test_autoconfig.py
|
# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.getcwd(), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.getcwd(), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
config = AutoConfig()
with patch('os.path.exists', return_value=False):
with patch.object(config, '_caller_path', return_value="/"):
with pytest.raises(RuntimeError):
config('KEY')
|
# coding: utf-8
import os
import pytest
from mock import patch
from decouple import AutoConfig
def test_autoconfig_env():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
def test_autoconfig_none():
config = AutoConfig()
with patch('os.path.exists', return_value=False):
with patch.object(config, '_caller_path', return_value="/"):
with pytest.raises(RuntimeError):
config('KEY')
|
Replace cwd with current module's path
|
Replace cwd with current module's path
|
Python
|
mit
|
mrkschan/python-decouple,flaviohenriqu/python-decouple,henriquebastos/django-decouple,liukaijv/python-decouple,henriquebastos/python-decouple
|
---
+++
@@ -7,14 +7,14 @@
def test_autoconfig_env():
config = AutoConfig()
- path = os.path.join(os.getcwd(), 'autoconfig', 'env', 'project')
+ path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'env', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'ENV' == config('KEY')
def test_autoconfig_ini():
config = AutoConfig()
- path = os.path.join(os.getcwd(), 'autoconfig', 'ini', 'project')
+ path = os.path.join(os.path.dirname(__file__), 'autoconfig', 'ini', 'project')
with patch.object(config, '_caller_path', return_value=path):
assert 'INI' == config('KEY')
|
ec884c9db173f093d1398de54d00f1c36f22d8e4
|
examples/random_valid_test_generator.py
|
examples/random_valid_test_generator.py
|
import sys
import time
from random import shuffle
from FairDistributor import FairDistributor
def main():
# User input for the number of targets and objects.
number_of_targets = int(sys.argv[1])
number_of_objects = int(sys.argv[2])
# Generate dummy lists for objects, targets and dummy matrix for weights
targets = ['target_{0}'.format(str(s))
for s in range(number_of_targets)]
objects = ['object_{0}'.format(str(s))
for s in range(number_of_objects)]
dummy_weights = list(range(1, number_of_objects+1))
weights_matrix = list()
for _ in range(number_of_targets):
new_random_weight_list = list(dummy_weights)
shuffle(new_random_weight_list)
weights_matrix.append(new_random_weight_list)
start_time = time.time()
distributor = FairDistributor(targets, objects, weights_matrix)
distributor.distribute()
elapsed_time = time.time() - start_time
print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
number_of_targets, number_of_objects, elapsed_time))
if __name__ == '__main__':
main()
|
import sys
import time
from random import shuffle
from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
number_of_targets = int(sys.argv[1])
number_of_objects = int(sys.argv[2])
# Generate dummy lists for objects, targets and dummy matrix for weights
targets = ['target_{0}'.format(str(s))
for s in range(number_of_targets)]
objects = ['object_{0}'.format(str(s))
for s in range(number_of_objects)]
dummy_weights = list(range(1, number_of_objects+1))
weights_matrix = list()
for _ in range(number_of_targets):
new_random_weight_list = list(dummy_weights)
shuffle(new_random_weight_list)
weights_matrix.append(new_random_weight_list)
# Benchmark solver
start_time = time.time()
distributor = FairDistributor(targets, objects, weights_matrix)
distributor.distribute()
elapsed_time = time.time() - start_time
# Output
print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
number_of_targets, number_of_objects, elapsed_time))
if __name__ == '__main__':
main()
|
Reformat random generator reformat code
|
Reformat random generator reformat code
|
Python
|
mit
|
Hackathonners/vania
|
---
+++
@@ -1,13 +1,14 @@
import sys
import time
from random import shuffle
-from FairDistributor import FairDistributor
+from vania.fair_distributor import FairDistributor
def main():
# User input for the number of targets and objects.
number_of_targets = int(sys.argv[1])
number_of_objects = int(sys.argv[2])
+
# Generate dummy lists for objects, targets and dummy matrix for weights
targets = ['target_{0}'.format(str(s))
for s in range(number_of_targets)]
@@ -19,10 +20,14 @@
new_random_weight_list = list(dummy_weights)
shuffle(new_random_weight_list)
weights_matrix.append(new_random_weight_list)
+
+ # Benchmark solver
start_time = time.time()
distributor = FairDistributor(targets, objects, weights_matrix)
distributor.distribute()
elapsed_time = time.time() - start_time
+
+ # Output
print('Number of Targets: {0}\nNumber of Objects: {1}\nTime elapsed: {2}'.format(
number_of_targets, number_of_objects, elapsed_time))
|
6c0be372323393bdd8f7c734f7cf5f6e5f14a1a2
|
tof_server/versioning.py
|
tof_server/versioning.py
|
"""Module for handling server and client versions"""
SERVER_VERSION = '0.1.0'
CLIENT_VERSIONS = ['0.5.0']
def validate(request):
for acceptable_version in CLIENT_VERSIONS:
if request.user_agent.string == 'ToF/' + acceptable_version:
return {
'status' : 'ok'
}
return {
'status' : 'error',
'code' : 403
}
|
"""Module for handling server and client versions"""
SERVER_VERSION = '0.1.0'
CLIENT_VERSIONS = ['0.5.0', '0.5.1']
def validate(request):
for acceptable_version in CLIENT_VERSIONS:
if request.user_agent.string == 'ToF/' + acceptable_version:
return {
'status' : 'ok'
}
return {
'status' : 'error',
'code' : 403
}
|
Add client beta version 0.5.1
|
Add client beta version 0.5.1
|
Python
|
mit
|
P1X-in/Tanks-of-Freedom-Server
|
---
+++
@@ -1,7 +1,7 @@
"""Module for handling server and client versions"""
SERVER_VERSION = '0.1.0'
-CLIENT_VERSIONS = ['0.5.0']
+CLIENT_VERSIONS = ['0.5.0', '0.5.1']
def validate(request):
|
6e535ccc43a090112ba140ff0eca533eed9c9935
|
kafka_influxdb/reader/kafka_reader.py
|
kafka_influxdb/reader/kafka_reader.py
|
# -*- coding: utf-8 -*-
import logging
import time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
class KafkaReader(object):
def __init__(self, host, port, group, topic, reconnect_wait_time=2):
"""
Initialize Kafka reader
"""
self.host = host
self.port = port
self.group = group
self.topic = topic
self.reconnect_wait_time = reconnect_wait_time
# Initialized on read
self.kafka_client = None
self.consumer = None
def connect(self):
connection = "{0}:{1}".format(self.host, self.port)
logging.info("Connecting to Kafka at %s...", connection)
self.kafka_client = KafkaClient(connection)
self.consumer = SimpleConsumer(self.kafka_client,
self.group,
self.topic)
def read(self):
"""
Read from Kafka. Reconnect on error.
"""
while True:
for msg in self.handle_read():
yield msg
def handle_read(self):
"""
Yield messages from Kafka topic
"""
try:
self.connect()
for raw_message in self.consumer:
yield raw_message.message.value
except Exception as e:
logging.error("Kafka error: %s.", e)
logging.error("Trying to reconnect to %s:%s", self.host, self.port)
time.sleep(self.reconnect_wait_time)
pass
|
# -*- coding: utf-8 -*-
import logging
import time
from kafka.client import KafkaClient
from kafka.consumer import SimpleConsumer
class KafkaReader(object):
def __init__(self, host, port, group, topic, reconnect_wait_time=2):
"""
Initialize Kafka reader
"""
self.host = host
self.port = port
self.group = group
self.topic = topic
self.reconnect_wait_time = reconnect_wait_time
# Initialized on read
self.kafka_client = None
self.consumer = None
def connect(self):
connection = "{0}:{1}".format(self.host, self.port)
logging.info("Connecting to Kafka at %s...", connection)
self.kafka_client = KafkaClient(connection)
self.consumer = SimpleConsumer(self.kafka_client,
self.group,
self.topic)
def read(self):
"""
Read from Kafka. Reconnect on error.
"""
while True:
for msg in self._handle_read():
yield msg
def _handle_read(self):
"""
Yield messages from Kafka topic
"""
try:
self.connect()
for raw_message in self.consumer:
yield raw_message.message.value
except Exception as e:
logging.error("Kafka error: %s.", e)
logging.error("Trying to reconnect to %s:%s", self.host, self.port)
time.sleep(self.reconnect_wait_time)
pass
|
Make handle_read a private method
|
Make handle_read a private method
|
Python
|
apache-2.0
|
mre/kafka-influxdb,mre/kafka-influxdb
|
---
+++
@@ -34,10 +34,10 @@
Read from Kafka. Reconnect on error.
"""
while True:
- for msg in self.handle_read():
+ for msg in self._handle_read():
yield msg
- def handle_read(self):
+ def _handle_read(self):
"""
Yield messages from Kafka topic
"""
|
a55bd9116114b546c06685e413209ab4279aaef5
|
genes/terraform/main.py
|
genes/terraform/main.py
|
from genes.mac.traits import is_osx
from genes.brew import brew
def main():
if is_osx():
brew.install()
|
from genes.mac.traits import is_osx
from genes.brew.command import Brew
def main():
if is_osx():
brew = Brew()
brew.install()
|
Change the brew instruction. kinda dumb
|
Change the brew instruction. kinda dumb
|
Python
|
mit
|
hatchery/genepool,hatchery/Genepool2
|
---
+++
@@ -1,8 +1,9 @@
from genes.mac.traits import is_osx
-from genes.brew import brew
+from genes.brew.command import Brew
def main():
if is_osx():
+ brew = Brew()
brew.install()
|
49fc55369d4755148d3db58c593d0b6f4d60582d
|
run_tests.py
|
run_tests.py
|
import sys
import os
import unittest
import subprocess
import time
cmd = 'python -m pretenders.server.server --host 127.0.0.1 --port 50000'
p = subprocess.Popen(cmd)
time.sleep(2)
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'src')))
print("Run")
runner = unittest.TextTestRunner().run(unittest.defaultTestLoader.discover(start_dir='cutecoin.tests', pattern='test_*'))
print("Terminate")
p.terminate()
sys.exit(not runner.wasSuccessful())
|
import sys
import os
import unittest
import subprocess
import time
import shlex
cmd = 'python -m pretenders.server.server --host 127.0.0.1 --port 50000'
p = subprocess.Popen(shlex.split(cmd))
time.sleep(2)
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'src')))
print("Run")
runner = unittest.TextTestRunner().run(unittest.defaultTestLoader.discover(start_dir='cutecoin.tests', pattern='test_*'))
print("Terminate")
p.terminate()
sys.exit(not runner.wasSuccessful())
|
Fix test run on linux / travis
|
Fix test run on linux / travis
|
Python
|
mit
|
ucoin-io/cutecoin,ucoin-io/cutecoin,ucoin-io/cutecoin
|
---
+++
@@ -3,10 +3,11 @@
import unittest
import subprocess
import time
+import shlex
cmd = 'python -m pretenders.server.server --host 127.0.0.1 --port 50000'
-p = subprocess.Popen(cmd)
+p = subprocess.Popen(shlex.split(cmd))
time.sleep(2)
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
|
6297eddaceb996a2c76825295af6a37e81d5c2fb
|
ain7/organizations/autocomplete_light_registry.py
|
ain7/organizations/autocomplete_light_registry.py
|
# -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2015 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(Office)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
|
# -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2015 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
import autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
|
Allow to autocomplete on office & organization names
|
Allow to autocomplete on office & organization names
|
Python
|
lgpl-2.1
|
ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org
|
---
+++
@@ -28,6 +28,9 @@
)
-autocomplete_light.register(Office)
+autocomplete_light.register(
+ Office,
+ search_fields=['name', 'organization__name'],
+)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
|
db7e0e2ddff42081bc46002c656611ce5a5ba7b5
|
allauth/socialaccount/providers/kakao/provider.py
|
allauth/socialaccount/providers/kakao/provider.py
|
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
@property
def properties(self):
return self.account.extra_data.get('properties')
def get_avatar_url(self):
return self.properties.get('profile_image')
def to_str(self):
dflt = super(KakaoAccount, self).to_str()
return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
id = 'kakao'
name = 'Kakao'
account_class = KakaoAccount
def extract_uid(self, data):
return str(data['id'])
def extract_common_fields(self, data):
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
return dict(email=email, nickname=nickname)
def extract_email_addresses(self, data):
ret = []
data = data['kakao_account']
email = data.get('email')
if email:
verified = data.get('is_email_verified')
# data['is_email_verified'] imply the email address is
# verified
ret.append(EmailAddress(email=email,
verified=verified,
primary=True))
return ret
provider_classes = [KakaoProvider]
|
from allauth.account.models import EmailAddress
from allauth.socialaccount.providers.base import ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class KakaoAccount(ProviderAccount):
    @property
    def properties(self):
        # Kakao nests profile fields (nickname, profile_image, ...) under 'properties'.
        return self.account.extra_data.get('properties')
    def get_avatar_url(self):
        # NOTE(review): properties may be None if 'properties' is absent from
        # extra_data — confirm the upstream response always includes it.
        return self.properties.get('profile_image')
    def to_str(self):
        # Prefer the Kakao nickname; fall back to the generic account string.
        dflt = super(KakaoAccount, self).to_str()
        return self.properties.get('nickname', dflt)
class KakaoProvider(OAuth2Provider):
    id = 'kakao'
    name = 'Kakao'
    account_class = KakaoAccount
    def extract_uid(self, data):
        # Kakao user ids are numeric; normalize to str for allauth.
        return str(data['id'])
    def extract_common_fields(self, data):
        email = data['kakao_account'].get('email')
        nickname = data['properties'].get('nickname')
        return dict(email=email, username=nickname)
    def extract_email_addresses(self, data):
        ret = []
        data = data['kakao_account']
        email = data.get('email')
        if email:
            verified = data.get('is_email_verified')
            # data['is_email_verified'] implies the email address is
            # verified
            ret.append(EmailAddress(email=email,
                                    verified=verified,
                                    primary=True))
        return ret
provider_classes = [KakaoProvider]
|
Change field name from 'nickname' to 'username'
|
fix(kakao): Change field name from 'nickname' to 'username'
|
Python
|
mit
|
pennersr/django-allauth,rsalmaso/django-allauth,lukeburden/django-allauth,rsalmaso/django-allauth,lukeburden/django-allauth,bittner/django-allauth,bittner/django-allauth,lukeburden/django-allauth,pennersr/django-allauth,bittner/django-allauth,rsalmaso/django-allauth,pennersr/django-allauth
|
---
+++
@@ -28,7 +28,7 @@
email = data['kakao_account'].get('email')
nickname = data['properties'].get('nickname')
- return dict(email=email, nickname=nickname)
+ return dict(email=email, username=nickname)
def extract_email_addresses(self, data):
ret = []
|
5fef3e5a5425ab71abb4c3b8a36a2273c9947b2e
|
bcbio/chipseq/__init__.py
|
bcbio/chipseq/__init__.py
|
from bcbio.ngsalign.bowtie2 import filter_multimappers
import bcbio.pipeline.datadict as dd
def clean_chipseq_alignment(data):
aligner = dd.get_aligner(data)
data["raw_bam"] = dd.get_work_bam(data)
if aligner:
assert aligner == "bowtie2", "ChIP-seq only supported for bowtie2."
unique_bam = filter_multimappers(dd.get_work_bam(data), data)
data["work_bam"] = unique_bam
return [[data]]
|
from bcbio.ngsalign.bowtie2 import filter_multimappers
import bcbio.pipeline.datadict as dd
def clean_chipseq_alignment(data):
    # Keep the unfiltered alignment available as "raw_bam" before any cleaning.
    aligner = dd.get_aligner(data)
    data["raw_bam"] = dd.get_work_bam(data)
    if aligner:
        assert aligner == "bowtie2", "ChIP-seq only supported for bowtie2."
        unique_bam = filter_multimappers(dd.get_work_bam(data), data)
        data["work_bam"] = unique_bam
    else:
        # NOTE(review): `logger` is not imported in this snippet — confirm it
        # is in scope in the full module, otherwise this branch raises NameError.
        logger.info("When BAM file is given, bcbio skips multimappers removal.")
        logger.info("If BAM is not cleaned for peak calling, can result in downstream errors.")
    return [[data]]
|
Add warning when aligner is false
|
Chipseq: Add warning when aligner is false
|
Python
|
mit
|
biocyberman/bcbio-nextgen,biocyberman/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen,lbeltrame/bcbio-nextgen,brainstorm/bcbio-nextgen,a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,biocyberman/bcbio-nextgen,vladsaveliev/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen,lbeltrame/bcbio-nextgen,brainstorm/bcbio-nextgen,a113n/bcbio-nextgen,brainstorm/bcbio-nextgen,chapmanb/bcbio-nextgen,vladsaveliev/bcbio-nextgen
|
---
+++
@@ -8,4 +8,7 @@
assert aligner == "bowtie2", "ChIP-seq only supported for bowtie2."
unique_bam = filter_multimappers(dd.get_work_bam(data), data)
data["work_bam"] = unique_bam
+ else:
+ logger.info("When BAM file is given, bcbio skips multimappers removal.")
+ logger.info("If BAM is not cleaned for peak calling, can result in downstream errors.")
return [[data]]
|
5b616f5b3d605b1831d4ca8ca0a9be561f399a89
|
falmer/events/admin.py
|
falmer/events/admin.py
|
from django.contrib import admin
from django.contrib.admin import register
from falmer.events.models import Event
@register(Event)
class EventModelAdmin(admin.ModelAdmin):
pass
|
from django.contrib import admin
from django.contrib.admin import register
from falmer.events.models import Event, MSLEvent
@register(Event)
class EventModelAdmin(admin.ModelAdmin):
    # Show key scheduling fields in the admin changelist.
    list_display = ('title', 'start_time', 'end_time', )
@register(MSLEvent)
class MSLEventModelAdmin(admin.ModelAdmin):
    pass
|
Improve list display of events
|
Improve list display of events
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
---
+++
@@ -1,9 +1,13 @@
from django.contrib import admin
from django.contrib.admin import register
-from falmer.events.models import Event
+from falmer.events.models import Event, MSLEvent
@register(Event)
class EventModelAdmin(admin.ModelAdmin):
+ list_display = ('title', 'start_time', 'end_time', )
+
+@register(MSLEvent)
+class MSLEventModelAdmin(admin.ModelAdmin):
pass
|
cbb6f7495123f1745284d0b098dcfaae0b31c5f3
|
bin/commands/utils/git.py
|
bin/commands/utils/git.py
|
"""A collection of common git actions."""
from subprocess import check_output, PIPE, Popen, STDOUT
def is_valid_reference(reference):
"""Determines if a reference is valid.
:param str reference: name of the reference to validate
:return bool: whether or not the reference is valid
"""
show_ref_proc = Popen(['git', 'show-ref', '--quiet', reference])
show_ref_proc.communicate()
return not show_ref_proc.returncode
def is_commit(object):
"""Determines if an object is a commit.
:param object: a git object
:return bool: whether or not the object is a commit object
"""
cat_file_proc = Popen(['git', 'cat-file', '-t', object], stdout=PIPE, stderr=STDOUT)
object_type = cat_file_proc.communicate()[0].strip()
return not cat_file_proc.returncode and object_type == 'commit'
def current_branch():
"""Returns the current branch.
:return str or unicode: the name of the current branch
"""
return check_output(('git', 'rev-parse', '--abbrev-ref', 'HEAD')).strip()
|
"""A collection of common git actions."""
import os
from subprocess import check_output, PIPE, Popen, STDOUT
def is_valid_reference(reference):
"""Determines if a reference is valid.
:param str reference: name of the reference to validate
:return bool: whether or not the reference is valid
"""
assert isinstance(reference, str), "'reference' must be a str. Given: " + type(reference).__name__
show_ref_proc = Popen(['git', 'show-ref', '--quiet', reference])
show_ref_proc.communicate()
return not show_ref_proc.returncode
def is_commit(object):
"""Determines if an object is a commit.
:param str object: a git object
:return bool: whether or not the object is a commit object
"""
assert isinstance(object, str), "'object' must be a str. Given: " + type(object).__name__
with open(os.devnull, 'w') as dev_null:
cat_file_proc = Popen(['git', 'cat-file', '-t', object], stdout=PIPE, stderr=dev_null)
object_type = cat_file_proc.communicate()[0].strip()
return not cat_file_proc.returncode and object_type == 'commit'
def current_branch():
"""Returns the current branch.
:return str or unicode: the name of the current branch
"""
return check_output(('git', 'rev-parse', '--abbrev-ref', 'HEAD')).strip()
|
Add type checking and piping to /dev/null
|
Add type checking and piping to /dev/null
|
Python
|
mit
|
Brickstertwo/git-commands
|
---
+++
@@ -1,5 +1,6 @@
"""A collection of common git actions."""
+import os
from subprocess import check_output, PIPE, Popen, STDOUT
@@ -11,6 +12,8 @@
:return bool: whether or not the reference is valid
"""
+ assert isinstance(reference, str), "'reference' must be a str. Given: " + type(reference).__name__
+
show_ref_proc = Popen(['git', 'show-ref', '--quiet', reference])
show_ref_proc.communicate()
return not show_ref_proc.returncode
@@ -19,14 +22,17 @@
def is_commit(object):
"""Determines if an object is a commit.
- :param object: a git object
+ :param str object: a git object
:return bool: whether or not the object is a commit object
"""
- cat_file_proc = Popen(['git', 'cat-file', '-t', object], stdout=PIPE, stderr=STDOUT)
- object_type = cat_file_proc.communicate()[0].strip()
- return not cat_file_proc.returncode and object_type == 'commit'
+ assert isinstance(object, str), "'object' must be a str. Given: " + type(object).__name__
+
+ with open(os.devnull, 'w') as dev_null:
+ cat_file_proc = Popen(['git', 'cat-file', '-t', object], stdout=PIPE, stderr=dev_null)
+ object_type = cat_file_proc.communicate()[0].strip()
+ return not cat_file_proc.returncode and object_type == 'commit'
def current_branch():
|
25e2c37bb9dc17f0c10ae744b1554b94c4e5a7ff
|
doj/monkey/__init__.py
|
doj/monkey/__init__.py
|
# -*- coding: utf-8 -*-
import doj.monkey.django_utils_functional_lazy
import doj.monkey.django_http_response_streaminghttpresponse
import doj.monkey.inspect_getcallargs
def install_monkey_patches():
doj.monkey.django_utils_functional_lazy.install()
doj.monkey.django_http_response_streaminghttpresponse.install()
doj.monkey.inspect_getcallargs.install()
|
# -*- coding: utf-8 -*-
import doj.monkey.django_utils_functional_lazy
import doj.monkey.django_http_response_streaminghttpresponse
import doj.monkey.inspect_getcallargs
def install_monkey_patches():
    # Make sure we install monkey patches only once.
    # The idempotence flag is stored on the function object itself, so repeated
    # calls after the first are no-ops.
    if not getattr(install_monkey_patches, 'installed', False):
        setattr(install_monkey_patches, 'installed', True)
        doj.monkey.django_utils_functional_lazy.install()
        doj.monkey.django_http_response_streaminghttpresponse.install()
        doj.monkey.inspect_getcallargs.install()
|
Make sure we install monkey patches only once
|
Make sure we install monkey patches only once
|
Python
|
bsd-3-clause
|
beachmachine/django-jython
|
---
+++
@@ -6,6 +6,10 @@
def install_monkey_patches():
- doj.monkey.django_utils_functional_lazy.install()
- doj.monkey.django_http_response_streaminghttpresponse.install()
- doj.monkey.inspect_getcallargs.install()
+ # Make sure we install monkey patches only once
+ if not getattr(install_monkey_patches, 'installed', False):
+ setattr(install_monkey_patches, 'installed', True)
+
+ doj.monkey.django_utils_functional_lazy.install()
+ doj.monkey.django_http_response_streaminghttpresponse.install()
+ doj.monkey.inspect_getcallargs.install()
|
109018326b317a160e0ba555b23b7b4401f44ed3
|
website/views.py
|
website/views.py
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from news.models import Article, Event
from door.models import DoorStatus
from datetime import datetime
from itertools import chain
def index(request):
number_of_news = 3
# Sorts the news to show the events nearest in future and then fill in with the newest articles
event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1]
article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)]
news_list = list(chain(event_list, article_list))
try:
door_status = DoorStatus.objects.get(name='hackerspace').status
except DoorStatus.DoesNotExist:
door_status = True
context = {
'news_list': news_list,
'door_status': door_status,
}
return render(request, 'index.html', context)
def opptak(request):
return HttpResponseRedirect(reverse('application_form'))
def test404(request):
return render(request, '404.html')
|
from django.shortcuts import render
from django.http import HttpResponseRedirect
from django.core.urlresolvers import reverse
from news.models import Article, Event
from door.models import DoorStatus
from datetime import datetime
from itertools import chain
def index(request):
    number_of_news = 3
    # Sorts the news to show the events nearest in future and then fill in with the newest articles
    # NOTE(review): the slice [0:number_of_news:-1] uses a negative step with
    # start 0, which always yields an empty sequence — confirm the intent.
    event_list = Event.objects.filter(time_end__gte=datetime.now())[0:number_of_news:-1]
    article_list = Article.objects.order_by('-pub_date')[0:number_of_news - len(event_list)]
    news_list = list(chain(event_list, article_list))
    try:
        door_status = DoorStatus.objects.get(name='hackerspace').status
    except DoorStatus.DoesNotExist:
        # Default to "open" when no status row exists yet.
        door_status = True
    context = {
        'news_list': news_list,
        'door_status': door_status,
    }
    return render(request, 'index.html', context)
def opptak(request):
    # Redirect the recruitment URL to the article with id 6.
    return HttpResponseRedirect(reverse('article', args=[6]))
def test404(request):
    return render(request, '404.html')
|
Change redirect to application redirect
|
Change redirect to application redirect
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
---
+++
@@ -28,7 +28,7 @@
def opptak(request):
- return HttpResponseRedirect(reverse('application_form'))
+ return HttpResponseRedirect(reverse('article', args=[6]))
def test404(request):
|
23450c05921ecadedc03a273804e8e6ddaa5439a
|
meetup_facebook_bot/models/speaker.py
|
meetup_facebook_bot/models/speaker.py
|
from sqlalchemy import Column, BIGINT, String, Integer
from meetup_facebook_bot.models.base import Base
class Speaker(Base):
__tablename__ = 'speakers'
id = Column(Integer, primary_key=True, autoincrement=True)
page_scoped_id = Column(BIGINT, unique=True)
name = Column(String(128), nullable=False)
token = Column(String(128), unique=True, nullable=False)
def __repr__(self):
return '<Speaker %r>' % self.facebook_id
|
from sqlalchemy import Column, BIGINT, String, Integer
from meetup_facebook_bot.models.base import Base
class Speaker(Base):
    __tablename__ = 'speakers'
    # Internal surrogate primary key.
    id = Column(Integer, primary_key=True, autoincrement=True)
    # Messenger page-scoped user id; unique per speaker.
    page_scoped_id = Column(BIGINT, unique=True)
    name = Column(String(128), nullable=False)
    token = Column(String(128), unique=True, nullable=False)
    def __repr__(self):
        return '<Speaker %r>' % self.id
|
Fix repr calling unknown attribute
|
Fix repr calling unknown attribute
|
Python
|
mit
|
Stark-Mountain/meetup-facebook-bot,Stark-Mountain/meetup-facebook-bot
|
---
+++
@@ -11,4 +11,4 @@
token = Column(String(128), unique=True, nullable=False)
def __repr__(self):
- return '<Speaker %r>' % self.facebook_id
+ return '<Speaker %r>' % self.id
|
198a941c8c71802b72c33f5ef89d1d4d46e52eac
|
scripts/fetch_all_urls_to_disk.py
|
scripts/fetch_all_urls_to_disk.py
|
import urllib
import os
import hashlib
with open('media_urls.txt','r') as f:
for url in f:
imagename = os.path.basename(url)
m = hashlib.md5(url).hexdigest()
if '.jpg' in url:
shortname = m + '.jpg'
elif '.png' in url:
shortname = m + '.png'
else:
print 'no jpg nor png'
print shortname
with open(shortname, 'wb') as imgfile:
imgfile.write(urllib.urlopen(url).read())
imgfile.close()
|
import urllib
import os
import hashlib
# Python 2 script: download every image URL listed in media_urls.txt,
# naming each saved file by the MD5 hex digest of its URL line.
with open('media_urls.txt','r') as f:
    for url in f:
        imagename = os.path.basename(url)
        # NOTE(review): `url` still carries its trailing newline here — confirm
        # that hashing and urlopen behave as intended with it attached.
        m = hashlib.md5(url).hexdigest()
        if '.jpg' in url:
            shortname = m + '.jpg'
        elif '.png' in url:
            shortname = m + '.png'
        else:
            # Skip anything that is neither a JPEG nor a PNG.
            print 'no jpg nor png'
            continue
        print shortname
        with open(shortname, 'wb') as imgfile:
            imgfile.write(urllib.urlopen(url).read())
            imgfile.close()
|
Add continue when no extension ".jpg" nor ".png" is found in URL
|
Add continue when no extension ".jpg" nor ".png" is found in URL
|
Python
|
mit
|
mixbe/kerstkaart2013,mixbe/kerstkaart2013
|
---
+++
@@ -14,6 +14,7 @@
shortname = m + '.png'
else:
print 'no jpg nor png'
+ continue
print shortname
with open(shortname, 'wb') as imgfile:
|
1c1c7dd151b3b7894fff74b31c15bded4ac4dc96
|
lintrain/solvers/ridgeregression.py
|
lintrain/solvers/ridgeregression.py
|
from solver import Solver
import numpy as np
class RidgeRegression(Solver):
"""
Analytically performs ridge regression, where coefficients are regularized by learning rate alpha. This constrains
coefficients and can be effective in situations where over- or under-fitting arise.
Based off of:
https://gist.github.com/diogojc/1519756
"""
alpha = 0.1
intercept = True
def __init__(self):
Solver.__init__(self)
def calculate_parameters(self, x, y):
g = self.alpha * np.eye(x.shape[1])
# cancel out intercept term (do not regularize intercept
if self.intercept:
idx = np.all(x == 1, axis=0)
g[idx, idx] = 0
fit = np.dot(np.linalg.inv(np.dot(x.T, x) + np.dot(g.T, g)),
np.dot(x.T, y))
return fit
def apply_parameters(self, x, params):
return np.dot(x, params)
|
from solver import Solver
import numpy as np
class RidgeRegression(Solver):
    """
    Analytically performs ridge regression, where coefficients are regularized by learning rate alpha. This constrains
    coefficients and can be effective in situations where over- or under-fitting arise. Parameters `alpha`
    is the regularization constant (alpha of 0 is least squares, as alpha increases, coefficients are increasingly
    constrained). Parameter `intercept` (defaults to true) causes an intercept column (all ones) to automatically be
    detected and excluded from regularization.
    Based off of:
    https://gist.github.com/diogojc/1519756
    """
    def __init__(self, alpha=0.1, intercept=True):
        Solver.__init__(self)
        # parameters
        self.alpha = alpha  # regularization constant
        self.intercept = intercept  # automatically guess intercept and do not regularize
    def calculate_parameters(self, x, y):
        # Tikhonov matrix: alpha times the identity.
        g = self.alpha * np.eye(x.shape[1])
        # cancel out intercept term (do not regularize intercept)
        if self.intercept:
            idx = np.all(x == 1, axis=0)
            g[idx, idx] = 0
        # Closed-form ridge solution: (X'X + G'G)^-1 X'y.
        fit = np.dot(np.linalg.inv(np.dot(x.T, x) + np.dot(g.T, g)),
                     np.dot(x.T, y))
        return fit
    def apply_parameters(self, x, params):
        return np.dot(x, params)
|
Allow setting ridge regression parameters during creation
|
Allow setting ridge regression parameters during creation
|
Python
|
mit
|
nathanntg/lin-train,nathanntg/lin-train
|
---
+++
@@ -5,16 +5,21 @@
class RidgeRegression(Solver):
"""
Analytically performs ridge regression, where coefficients are regularized by learning rate alpha. This constrains
- coefficients and can be effective in situations where over- or under-fitting arise.
+ coefficients and can be effective in situations where over- or under-fitting arise. Parameters `alpha`
+ is the regularization constant (alpha of 0 is least squares, as alpha increases, coefficients are increasingly
+ constrained). Parameter `intercept` (defaults to true) causes an intercept column (all ones) to automatically be
+ detected and excluded from regularization.
+
Based off of:
https://gist.github.com/diogojc/1519756
"""
- alpha = 0.1
- intercept = True
+ def __init__(self, alpha=0.1, intercept=True):
+ Solver.__init__(self)
- def __init__(self):
- Solver.__init__(self)
+ # parameters
+ self.alpha = alpha # regularization constant
+ self.intercept = intercept # automatically gues intercept and do not regularize
def calculate_parameters(self, x, y):
g = self.alpha * np.eye(x.shape[1])
|
f8b4f4a2c5a7f529816f78344509a3536a0f3254
|
datapipe/targets/filesystem.py
|
datapipe/targets/filesystem.py
|
import os
from ..target import Target
class LocalFile(Target):
def __init__(self, path):
self._path = path
super(LocalFile, self).__init__()
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
def identifier(self):
return self._path
def exists(self):
return os.path.exists(self._path)
def path(self):
return self._path
def store(self, batch=None):
if self.exists():
self._memory['timestamp'] = os.path.getmtime(self._path)
else:
self._memory['timestamp'] = 0
super(LocalFile, self).store(batch)
def open(self, *args, **kwargs):
return open(self._path, *args, **kwargs)
def is_damaged(self):
mem = self.stored()
if mem is None or not 'timestamp' in mem:
return True
return self._memory['timestamp'] > mem['timestamp']
|
import os
from ..target import Target
class LocalFile(Target):
    def __init__(self, path):
        self._path = path
        super(LocalFile, self).__init__()
        # Record the file's mtime so staleness can be detected later; 0 means
        # the file does not exist yet.
        if self.exists():
            self._memory['timestamp'] = os.path.getmtime(self._path)
        else:
            self._memory['timestamp'] = 0
    def identifier(self):
        return self._path
    def exists(self):
        return os.path.exists(self._path)
    def path(self):
        return self._path
    def store(self, batch=None):
        # Refresh the timestamp before persisting target state.
        if self.exists():
            self._memory['timestamp'] = os.path.getmtime(self._path)
        else:
            self._memory['timestamp'] = 0
        super(LocalFile, self).store(batch)
    def open(self, *args, **kwargs):
        return open(self._path, *args, **kwargs)
    def is_damaged(self):
        # Damaged when never stored, missing on disk, or newer than the stored
        # snapshot — any of which forces a rebuild.
        mem = self.stored()
        if mem is None or not 'timestamp' in mem:
            return True
        if not self.exists():
            return True
        return self._memory['timestamp'] > mem['timestamp']
|
Fix another situation where targets didn't get rebuilt
|
Fix another situation where targets didn't get rebuilt
|
Python
|
mit
|
ibab/datapipe
|
---
+++
@@ -34,5 +34,8 @@
if mem is None or not 'timestamp' in mem:
return True
+ if not self.exists():
+ return True
+
return self._memory['timestamp'] > mem['timestamp']
|
7bb93bfdf2b75ba8df0983d058854a1d00d75c16
|
geotrek/feedback/tests/test_commands.py
|
geotrek/feedback/tests/test_commands.py
|
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
from django.utils import timezone
from geotrek.feedback.models import Report
from geotrek.feedback.factories import ReportFactory
class TestRemoveEmailsOlders(TestCase):
"""Test command erase_emails, if older emails are removed"""
def setUp(self):
# Create two reports
self.old_report = ReportFactory()
self.recent_report = ReportFactory()
# Modify date_insert for old_report
one_year_one_day = timezone.timedelta(days=370)
self.old_report.date_insert = timezone.now() - one_year_one_day
self.old_report.save()
def test_erase_old_emails(self):
output = StringIO()
call_command('erase_emails', stdout=output)
old_report = Report.objects.get(id=self.old_report.id)
self.assertEqual(old_report.email, "")
self.assertEqual(old_report.__str__(), "Anonymized report")
|
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
from django.utils import timezone
from geotrek.feedback.models import Report
from geotrek.feedback.factories import ReportFactory
class TestRemoveEmailsOlders(TestCase):
    """Test command erase_emails, if older emails are removed"""
    def setUp(self):
        # Create two reports
        self.old_report = ReportFactory(email="to_erase@you.com")
        self.recent_report = ReportFactory(email="yeah@you.com")
        # Modify date_insert for old_report
        one_year_one_day = timezone.timedelta(days=370)
        self.old_report.date_insert = timezone.now() - one_year_one_day
        self.old_report.save()
    def test_erase_old_emails(self):
        # Reports older than a year must have their email anonymized.
        output = StringIO()
        call_command('erase_emails', stdout=output)
        old_report = Report.objects.get(id=self.old_report.id)
        self.assertEqual(old_report.email, "")
        self.assertEqual(old_report.__str__(), "Anonymous report")
    def test_dry_run_command(self):
        """Test if dry_run mode keeps emails"""
        output = StringIO()
        call_command('erase_emails', dry_run=True, stdout=output)
        old_report = Report.objects.get(id=self.old_report.id)
        self.assertEqual(old_report.email, "to_erase@you.com")
|
Test dry run mode in erase_mail
|
Test dry run mode in erase_mail
|
Python
|
bsd-2-clause
|
makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin
|
---
+++
@@ -13,8 +13,8 @@
def setUp(self):
# Create two reports
- self.old_report = ReportFactory()
- self.recent_report = ReportFactory()
+ self.old_report = ReportFactory(email="to_erase@you.com")
+ self.recent_report = ReportFactory(email="yeah@you.com")
# Modify date_insert for old_report
one_year_one_day = timezone.timedelta(days=370)
@@ -26,4 +26,11 @@
call_command('erase_emails', stdout=output)
old_report = Report.objects.get(id=self.old_report.id)
self.assertEqual(old_report.email, "")
- self.assertEqual(old_report.__str__(), "Anonymized report")
+ self.assertEqual(old_report.__str__(), "Anonymous report")
+
+ def test_dry_run_command(self):
+ """Test if dry_run mode keeps emails"""
+ output = StringIO()
+ call_command('erase_emails', dry_run=True, stdout=output)
+ old_report = Report.objects.get(id=self.old_report.id)
+ self.assertEqual(old_report.email, "to_erase@you.com")
|
924766a6b56aba3a462600a70e5f4b7b322c677e
|
test/test_utils.py
|
test/test_utils.py
|
from piper.utils import DotDict
from piper.utils import dynamic_load
import pytest
class TestDotDict(object):
def test_get_nonexistant_raises_keyerror(self):
with pytest.raises(KeyError):
dd = DotDict({})
dd.does_not_exist
def test_get_item(self):
dd = DotDict({'danger': 'zone'})
assert dd.danger == 'zone'
def test_get_item_dict_access(self):
dd = DotDict({'danger': 'zone'})
assert dd['danger'] == 'zone'
def test_dict_items_become_dotdicts(self):
dd = DotDict({'highway': {'danger': 'zone'}})
assert isinstance(dd.highway, DotDict) is True
def test_nested_access(self):
dd = DotDict({'highway': {'danger': {'zone': True}}})
assert dd.highway.danger.zone is True
class TestDynamicLoad(object):
def test_proper_load(self):
cls = dynamic_load('piper.utils.DotDict')
assert cls is DotDict
def test_nonexistant_target(self):
with pytest.raises(ImportError):
dynamic_load('gammaray.empire.Avalon')
|
from piper.utils import DotDict
from piper.utils import dynamic_load
import pytest
class TestDotDict(object):
    def test_get_nonexistant_raises_keyerror(self):
        with pytest.raises(KeyError):
            dd = DotDict({})
            dd.does_not_exist
    def test_get_item(self):
        dd = DotDict({'danger': 'zone'})
        assert dd.danger == 'zone'
    def test_get_item_dict_access(self):
        dd = DotDict({'danger': 'zone'})
        assert dd['danger'] == 'zone'
    def test_dict_items_become_dotdicts(self):
        dd = DotDict({'highway': {'danger': 'zone'}})
        assert isinstance(dd.highway, DotDict) is True
    def test_dict_items_become_dotdicts_when_using_dict_access(self):
        # Subscript access must wrap nested dicts just like attribute access.
        dd = DotDict({'highway': {'danger': 'zone'}})
        assert isinstance(dd['highway'], DotDict) is True
    def test_nested_access(self):
        dd = DotDict({'highway': {'danger': {'zone': True}}})
        assert dd.highway.danger.zone is True
class TestDynamicLoad(object):
    def test_proper_load(self):
        cls = dynamic_load('piper.utils.DotDict')
        assert cls is DotDict
    def test_nonexistant_target(self):
        with pytest.raises(ImportError):
            dynamic_load('gammaray.empire.Avalon')
|
Add extra DotDict subscriptability test
|
Add extra DotDict subscriptability test
|
Python
|
mit
|
thiderman/piper
|
---
+++
@@ -22,6 +22,10 @@
dd = DotDict({'highway': {'danger': 'zone'}})
assert isinstance(dd.highway, DotDict) is True
+ def test_dict_items_become_dotdicts_when_using_dict_access(self):
+ dd = DotDict({'highway': {'danger': 'zone'}})
+ assert isinstance(dd['highway'], DotDict) is True
+
def test_nested_access(self):
dd = DotDict({'highway': {'danger': {'zone': True}}})
assert dd.highway.danger.zone is True
|
0515f71d861529262aada1ad416c626277e11d9e
|
django_excel_to_model/forms.py
|
django_excel_to_model/forms.py
|
from django.utils.translation import ugettext_lazy as _
from django import forms
from models import ExcelImportTask
from django.forms import ModelForm
class ExcelFormatTranslateForm(forms.Form):
# title = forms.CharField(max_length=50)
import_file = forms.FileField(
label=_('File to import')
)
header_row_numbered_from_1 = forms.IntegerField()
spreadsheet_numbered_from_1 = forms.IntegerField()
class_name = forms.CharField()
is_create_app_now = forms.BooleanField(required=False)
class ExcelImportTaskForm(ModelForm):
class Meta:
model = ExcelImportTask
fields = ['excel_file', 'content', "header_row_numbered_from_1", "spreadsheet_numbered_from_1"]
is_import_now = forms.BooleanField(required=False)
|
from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from django import forms
from models import ExcelImportTask
from django.forms import ModelForm
class ExcelFormatTranslateForm(forms.Form):
    # title = forms.CharField(max_length=50)
    import_file = forms.FileField(
        label=_('File to import')
    )
    # Row and sheet positions are 1-based, matching what users see in Excel.
    header_row_numbered_from_1 = forms.IntegerField()
    spreadsheet_numbered_from_1 = forms.IntegerField()
    class_name = forms.CharField()
    is_create_app_now = forms.BooleanField(required=False)
class ExcelImportTaskForm(ModelForm):
    # Sort the content-type choices alphabetically by model name.
    content = forms.ModelChoiceField(queryset=ContentType.objects.order_by('model'))
    class Meta:
        model = ExcelImportTask
        fields = ['excel_file', 'content', "header_row_numbered_from_1", "spreadsheet_numbered_from_1"]
    is_import_now = forms.BooleanField(required=False)
|
Sort content for data import.
|
Sort content for data import.
|
Python
|
bsd-3-clause
|
weijia/django-excel-to-model,weijia/django-excel-to-model
|
---
+++
@@ -1,3 +1,4 @@
+from django.contrib.contenttypes.models import ContentType
from django.utils.translation import ugettext_lazy as _
from django import forms
from models import ExcelImportTask
@@ -16,8 +17,10 @@
class ExcelImportTaskForm(ModelForm):
+ content = forms.ModelChoiceField(queryset=ContentType.objects.order_by('model'))
+
class Meta:
model = ExcelImportTask
fields = ['excel_file', 'content', "header_row_numbered_from_1", "spreadsheet_numbered_from_1"]
+
is_import_now = forms.BooleanField(required=False)
-
|
ef9d7cbbd79078e494faed730318a18f995f3a78
|
cla_public/libs/zendesk.py
|
cla_public/libs/zendesk.py
|
# -*- coding: utf-8 -*-
"Zendesk"
import json
import requests
from flask import current_app
TICKETS_URL = 'https://ministryofjustice.zendesk.com/api/v2/tickets.json'
def create_ticket(payload):
"Create a new Zendesk ticket"
return requests.post(
TICKETS_URL,
data=json.dumps(payload),
auth=(
'{username}/token'.format(
username=current_app.config['ZENDESK_API_USERNAME']),
current_app.config['ZENDESK_API_TOKEN']
),
headers={'content-type': 'application/json'})
|
# -*- coding: utf-8 -*-
"Zendesk"
import json
import requests
from flask import current_app
TICKETS_URL = 'https://ministryofjustice.zendesk.com/api/v2/tickets.json'
def zendesk_auth():
    # Zendesk API token auth pair: username is '<email>/token', password is
    # the API token itself.
    return (
        '{username}/token'.format(
            username=current_app.config['ZENDESK_API_USERNAME']),
        current_app.config['ZENDESK_API_TOKEN']
    )
def create_ticket(payload):
    "Create a new Zendesk ticket"
    return requests.post(
        TICKETS_URL,
        data=json.dumps(payload),
        auth=zendesk_auth(),
        headers={'content-type': 'application/json'})
def tickets():
    "List Zendesk tickets"
    return requests.get(
        TICKETS_URL,
        auth=zendesk_auth())
|
Refactor Zendesk client code for smoketest
|
Refactor Zendesk client code for smoketest
|
Python
|
mit
|
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
|
---
+++
@@ -9,14 +9,27 @@
TICKETS_URL = 'https://ministryofjustice.zendesk.com/api/v2/tickets.json'
+def zendesk_auth():
+ return (
+ '{username}/token'.format(
+ username=current_app.config['ZENDESK_API_USERNAME']),
+ current_app.config['ZENDESK_API_TOKEN']
+ )
+
+
def create_ticket(payload):
"Create a new Zendesk ticket"
+
return requests.post(
TICKETS_URL,
data=json.dumps(payload),
- auth=(
- '{username}/token'.format(
- username=current_app.config['ZENDESK_API_USERNAME']),
- current_app.config['ZENDESK_API_TOKEN']
- ),
+ auth=zendesk_auth(),
headers={'content-type': 'application/json'})
+
+
+def tickets():
+ "List Zendesk tickets"
+
+ return requests.get(
+ TICKETS_URL,
+ auth=zendesk_auth())
|
0722b517f5b5b9a84b7521b6b7d350cbc6537948
|
src/core/models.py
|
src/core/models.py
|
from django.db import models
class BigForeignKey(models.ForeignKey):
def db_type(self, connection):
""" Adds support for foreign keys to big integers as primary keys.
"""
presumed_type = super().db_type(connection)
if presumed_type == 'integer':
return 'bigint'
return presumed_type
|
from django.apps import apps
from django.db import models
class BigForeignKey(models.ForeignKey):
    def db_type(self, connection):
        """ Adds support for foreign keys to big integers as primary keys.
        Django's AutoField is actually an IntegerField (SQL integer field),
        but in some cases we are using bigint on PostgreSQL without Django
        knowing it. So we continue to trick Django here, swapping its field
        type detection, and just tells it to use bigint.
        :seealso: Migrations in the ``postgres`` app.
        """
        presumed_type = super().db_type(connection)
        # Only lie about the column type when the 'postgres' app is installed.
        if apps.is_installed('postgres') and presumed_type == 'integer':
            return 'bigint'
        return presumed_type
|
Add some explaination on BigForeignKey
|
Add some explaination on BigForeignKey
|
Python
|
mit
|
uranusjr/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,uranusjr/pycontw2016
|
---
+++
@@ -1,11 +1,19 @@
+from django.apps import apps
from django.db import models
class BigForeignKey(models.ForeignKey):
def db_type(self, connection):
""" Adds support for foreign keys to big integers as primary keys.
+
+ Django's AutoField is actually an IntegerField (SQL integer field),
+ but in some cases we are using bigint on PostgreSQL without Django
+ knowing it. So we continue to trick Django here, swapping its field
+ type detection, and just tells it to use bigint.
+
+ :seealso: Migrations in the ``postgres`` app.
"""
presumed_type = super().db_type(connection)
- if presumed_type == 'integer':
+ if apps.is_installed('postgres') and presumed_type == 'integer':
return 'bigint'
return presumed_type
|
e6fcb5122b7132e03257ac5c883f5e44ccdd1ef5
|
quokka/ext/before_request.py
|
quokka/ext/before_request.py
|
# coding: utf-8
def configure(app):
@app.before_first_request
def initialize():
print "Called only once, when the first request comes in"
|
# coding: utf-8
from quokka.core.models import Channel
def configure(app):
@app.before_first_request
def initialize():
print "Called only once, when the first request comes in"
if not Channel.objects.count():
# Create homepage if it does not exists
Channel.objects.create(
title="home",
slug="home",
description="App homepage",
is_homepage=True,
include_in_rss=True,
indexable=True,
show_in_menu=True,
canonical_url="/",
order=0,
published=True,
)
|
Create channel homepage if not exists in before request
|
Create channel homepage if not exists in before request
|
Python
|
mit
|
fdumpling/quokka,lnick/quokka,CoolCloud/quokka,Ckai1991/quokka,felipevolpone/quokka,romulocollopy/quokka,cbeloni/quokka,ChengChiongWah/quokka,cbeloni/quokka,CoolCloud/quokka,romulocollopy/quokka,fdumpling/quokka,fdumpling/quokka,maurobaraldi/quokka,maurobaraldi/quokka,Ckai1991/quokka,lnick/quokka,ChengChiongWah/quokka,wushuyi/quokka,CoolCloud/quokka,felipevolpone/quokka,alexandre/quokka,CoolCloud/quokka,Ckai1991/quokka,felipevolpone/quokka,lnick/quokka,Ckai1991/quokka,lnick/quokka,maurobaraldi/quokka,ChengChiongWah/quokka,maurobaraldi/quokka,romulocollopy/quokka,wushuyi/quokka,wushuyi/quokka,cbeloni/quokka,fdumpling/quokka,cbeloni/quokka,wushuyi/quokka,alexandre/quokka,felipevolpone/quokka,ChengChiongWah/quokka,romulocollopy/quokka
|
---
+++
@@ -1,7 +1,23 @@
# coding: utf-8
+
+from quokka.core.models import Channel
def configure(app):
@app.before_first_request
def initialize():
print "Called only once, when the first request comes in"
+ if not Channel.objects.count():
+ # Create homepage if it does not exists
+ Channel.objects.create(
+ title="home",
+ slug="home",
+ description="App homepage",
+ is_homepage=True,
+ include_in_rss=True,
+ indexable=True,
+ show_in_menu=True,
+ canonical_url="/",
+ order=0,
+ published=True,
+ )
|
2eb9220ee2043c2355682cab9094c8cd201bc2f7
|
yolk/__init__.py
|
yolk/__init__.py
|
"""
__init__.py
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__docformat__ = 'restructuredtext'
__version__ = '0.5'
|
"""
__init__.py
Author: Rob Cakebread <cakebread at gmail>
License : BSD
"""
__docformat__ = 'restructuredtext'
__version__ = '0.5.1'
|
Increment patch version to 0.5.1
|
Increment patch version to 0.5.1
|
Python
|
bsd-3-clause
|
myint/yolk,myint/yolk
|
---
+++
@@ -10,4 +10,4 @@
"""
__docformat__ = 'restructuredtext'
-__version__ = '0.5'
+__version__ = '0.5.1'
|
266968b5f5188c526506782a47ea03aa3d32bf7a
|
kb/core.py
|
kb/core.py
|
import abc
from collections import namedtuple
Key = namedtuple('Key', ['x', 'y'])
class Keyboard(metaclass=abc.ABCMeta):
def __init__(self):
pass
@abc.abstractproperty
def keys(self):
""" Return the keys of this keyboard.
:returns: An iterable of keys
"""
pass
class Actor(metaclass=abc.ABCMeta):
def __init__(self):
pass
@abc.abstractmethod
def evaluate(self, keyboard, layout, corpus):
""" Calculate how much effort the actor has to exert to type the corpus using the layout.
:param keyboard: The keyboard the layout is applied to.
:type keyboard: Keyboard
:param layout: The layout function to be evaluated.
:type layout: dict
:param corpus: The corpus used for evaluation.
:type corpus: iterable of strings
:returns: A floating point value in range [0, +inf] where lower values indicate less effort
"""
pass
|
import abc
from collections import namedtuple
Key = namedtuple('Key', ['y', 'x'])
class Keyboard(metaclass=abc.ABCMeta):
def __init__(self):
pass
@abc.abstractproperty
def keys(self):
""" Return the keys of this keyboard.
:returns: An iterable of keys
"""
pass
class Actor(metaclass=abc.ABCMeta):
def __init__(self):
pass
@abc.abstractmethod
def evaluate(self, keyboard, layout, corpus):
""" Calculate how much effort the actor has to exert to type the corpus using the layout.
:param keyboard: The keyboard the layout is applied to.
:type keyboard: Keyboard
:param layout: The layout function to be evaluated.
:type layout: dict
:param corpus: The corpus used for evaluation.
:type corpus: iterable of strings
:returns: A floating point value in range [0, +inf] where lower values indicate less effort
"""
pass
|
Swap order of Key x and y to make Keys sortable
|
Swap order of Key x and y to make Keys sortable
|
Python
|
mit
|
Cyanogenoid/kb-project
|
---
+++
@@ -2,7 +2,7 @@
from collections import namedtuple
-Key = namedtuple('Key', ['x', 'y'])
+Key = namedtuple('Key', ['y', 'x'])
class Keyboard(metaclass=abc.ABCMeta):
|
cf626539192ff60a0c2ffd06c61fb35f2d8861a1
|
tests/test_data.py
|
tests/test_data.py
|
from unittest import TestCase
from chatterbot_corpus import corpus
class CorpusUtilsTestCase(TestCase):
"""
This test case is designed to make sure that all
corpus data adheres to a few general rules.
"""
def test_character_count(self):
"""
Test that no line in the corpus exceeds the
maximum number of characters.
"""
from chatterbot_corpus.corpus import DIALOG_MAXIMUM_CHARACTER_LENGTH
corpora = corpus.load_corpus('chatterbot.corpus')
for conversations in corpora:
for conversation in conversations:
for statement in conversation:
if len(statement) > DIALOG_MAXIMUM_CHARACTER_LENGTH:
self.fail(
u'"{}" cannot be longer than {} characters'.format(
statement,
DIALOG_MAXIMUM_CHARACTER_LENGTH
)
)
|
from unittest import TestCase
from chatterbot_corpus import corpus
class CorpusUtilsTestCase(TestCase):
"""
This test case is designed to make sure that all
corpus data adheres to a few general rules.
"""
def test_character_count(self):
"""
Test that no line in the corpus exceeds the
maximum number of characters.
"""
from chatterbot_corpus.corpus import DIALOG_MAXIMUM_CHARACTER_LENGTH
corpora = corpus.load_corpus('chatterbot.corpus')
for conversations in corpora:
for conversation in conversations:
for statement in conversation:
if len(statement) > DIALOG_MAXIMUM_CHARACTER_LENGTH:
self.fail(
'"{}" cannot be longer than {} characters'.format(
statement,
DIALOG_MAXIMUM_CHARACTER_LENGTH
)
)
def test_conversation_format(self):
corpora = corpus.load_corpus('chatterbot.corpus')
for conversations in corpora:
for conversation in conversations:
for text in conversation:
if not isinstance(text, str):
self.fail('"{}" must be a string, not {}.'.format(
str(text),
type(text)
))
|
Add test for data type validation
|
Add test for data type validation
|
Python
|
bsd-3-clause
|
gunthercox/chatterbot-corpus
|
---
+++
@@ -22,8 +22,20 @@
for statement in conversation:
if len(statement) > DIALOG_MAXIMUM_CHARACTER_LENGTH:
self.fail(
- u'"{}" cannot be longer than {} characters'.format(
+ '"{}" cannot be longer than {} characters'.format(
statement,
DIALOG_MAXIMUM_CHARACTER_LENGTH
)
)
+
+ def test_conversation_format(self):
+ corpora = corpus.load_corpus('chatterbot.corpus')
+
+ for conversations in corpora:
+ for conversation in conversations:
+ for text in conversation:
+ if not isinstance(text, str):
+ self.fail('"{}" must be a string, not {}.'.format(
+ str(text),
+ type(text)
+ ))
|
876ff2e147aaa751d2ab2f5423b30fcfcc02fba9
|
tests/test_main.py
|
tests/test_main.py
|
import os
import sys
import pytest
from hypothesis_auto import auto_pytest_magic
from isort import main
auto_pytest_magic(main.sort_imports)
def test_is_python_file():
assert main.is_python_file("file.py")
assert main.is_python_file("file.pyi")
assert main.is_python_file("file.pyx")
assert not main.is_python_file("file.pyc")
assert not main.is_python_file("file.txt")
assert not main.is_python_file("file.pex")
@pytest.mark.skipif(sys.platform == "win32", reason="cannot create fifo file on Windows platform")
def test_is_python_file_fifo(tmpdir):
fifo_file = os.path.join(tmpdir, "fifo_file")
os.mkfifo(fifo_file)
assert not main.is_python_file(fifo_file)
def test_isort_command():
"""Ensure ISortCommand got registered, otherwise setuptools error must have occured"""
assert main.ISortCommand
|
import os
import sys
import pytest
from hypothesis_auto import auto_pytest_magic
from isort import main
from isort.settings import DEFAULT_CONFIG
auto_pytest_magic(main.sort_imports)
def test_iter_source_code(tmpdir):
tmp_file = tmpdir.join("file.py")
tmp_file.write("import os, sys\n")
assert tuple(main.iter_source_code((tmp_file,), DEFAULT_CONFIG, [])) == (tmp_file,)
def test_is_python_file():
assert main.is_python_file("file.py")
assert main.is_python_file("file.pyi")
assert main.is_python_file("file.pyx")
assert not main.is_python_file("file.pyc")
assert not main.is_python_file("file.txt")
assert not main.is_python_file("file.pex")
@pytest.mark.skipif(sys.platform == "win32", reason="cannot create fifo file on Windows platform")
def test_is_python_file_fifo(tmpdir):
fifo_file = os.path.join(tmpdir, "fifo_file")
os.mkfifo(fifo_file)
assert not main.is_python_file(fifo_file)
def test_isort_command():
"""Ensure ISortCommand got registered, otherwise setuptools error must have occured"""
assert main.ISortCommand
|
Add test case for iter_source_code
|
Add test case for iter_source_code
|
Python
|
mit
|
PyCQA/isort,PyCQA/isort
|
---
+++
@@ -5,8 +5,15 @@
from hypothesis_auto import auto_pytest_magic
from isort import main
+from isort.settings import DEFAULT_CONFIG
auto_pytest_magic(main.sort_imports)
+
+
+def test_iter_source_code(tmpdir):
+ tmp_file = tmpdir.join("file.py")
+ tmp_file.write("import os, sys\n")
+ assert tuple(main.iter_source_code((tmp_file,), DEFAULT_CONFIG, [])) == (tmp_file,)
def test_is_python_file():
|
de97d95d7746cbbf6c2c53a660553ce56d294288
|
tests/test_unit.py
|
tests/test_unit.py
|
# -*- coding: utf-8 -*-
"""
tests.test_unit
~~~~~~~~~~~~~~~
Module dedicated to testing the unit utility functions.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from pytest import raises, yield_fixture, mark
from lantz_core import unit
from lantz_core.unit import (set_unit_registry, get_unit_registry,
to_float, to_quantity)
try:
from pint import UnitRegistry
except ImportError:
pass
@yield_fixture
def teardown():
unit.UNIT_REGISTRY = None
yield
unit.UNIT_REGISTRY = None
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_set_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
assert get_unit_registry() is ureg
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_reset_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
with raises(ValueError):
set_unit_registry(ureg)
def test_converters(teardown):
"""Test to_quantity and to_float utility functions.
"""
val = 1.0
assert to_float(to_quantity(val, 'A')) == val
|
# -*- coding: utf-8 -*-
"""
tests.test_unit
~~~~~~~~~~~~~~~
Module dedicated to testing the unit utility functions.
:copyright: 2015 by Lantz Authors, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import (division, unicode_literals, print_function,
absolute_import)
from pytest import raises, yield_fixture, mark
from lantz_core import unit
from lantz_core.unit import (set_unit_registry, get_unit_registry,
to_float, to_quantity)
try:
from pint import UnitRegistry
except ImportError:
pass
@yield_fixture
def teardown():
unit.UNIT_REGISTRY = None
yield
unit.UNIT_REGISTRY = None
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_set_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
assert get_unit_registry() is ureg
@mark.skipif(unit.UNIT_SUPPORT is False, reason="Requires Pint")
def test_reset_unit_registry(teardown):
ureg = UnitRegistry()
set_unit_registry(ureg)
with raises(ValueError):
set_unit_registry(ureg)
def test_converters(teardown):
"""Test to_quantity and to_float utility functions.
"""
val = 1.0
assert to_float(val) == val
assert to_float(to_quantity(val, 'A')) == val
|
Add missing test for to_float applied on a float (when pint is present).
|
Add missing test for to_float applied on a float (when pint is present).
|
Python
|
bsd-3-clause
|
MatthieuDartiailh/lantz_core
|
---
+++
@@ -51,4 +51,5 @@
"""
val = 1.0
+ assert to_float(val) == val
assert to_float(to_quantity(val, 'A')) == val
|
7158d44eaf764b8140675bbe7b8e2bea857edd25
|
coinotomy/main.py
|
coinotomy/main.py
|
import logging
import os
from threading import Thread
from coinotomy.config.config import STORAGE_CLASS, STORAGE_DIRECTORY, WATCHERS
log = logging.getLogger("main")
logging.basicConfig(filename=os.path.join(STORAGE_DIRECTORY, 'log.txt'),
filemode='a',
datefmt='%H:%M:%S',
level=logging.DEBUG)
def launch_worker(watcher):
backend = STORAGE_CLASS(os.path.join(STORAGE_DIRECTORY, watcher.name))
watcher.run(backend)
def main():
threads = []
log.info("starting main thread")
# launch all watchers
for watcher in WATCHERS:
thread = Thread(target=launch_worker, args=(watcher,))
thread.start()
threads.append(thread)
# block until all threads terminate
for thread in threads:
thread.join()
log.info("terminating main thread")
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
import logging
import os
from threading import Thread
from coinotomy.config.config import STORAGE_CLASS, STORAGE_DIRECTORY, WATCHERS
log = logging.getLogger("main")
logging.basicConfig(filename=os.path.join(STORAGE_DIRECTORY, 'log.txt'),
filemode='a',
datefmt='%H:%M:%S',
level=logging.DEBUG)
def launch_worker(watcher):
backend = STORAGE_CLASS(os.path.join(STORAGE_DIRECTORY, watcher.name))
watcher.run(backend)
def main():
threads = []
log.info("starting main thread")
# launch all watchers
for watcher in WATCHERS:
thread = Thread(target=launch_worker, args=(watcher,))
thread.start()
threads.append(thread)
# block until all threads terminate
for thread in threads:
thread.join()
log.info("terminating main thread")
if __name__ == '__main__':
main()
|
Add shebang for the linux folks out there.
|
Add shebang for the linux folks out there.
|
Python
|
mit
|
sDessens/coinotomy
|
---
+++
@@ -1,3 +1,5 @@
+#!/usr/bin/env python3
+
import logging
import os
|
b8f604e11270b889bafc38709814df4e1bb961dd
|
dthm4kaiako/config/__init__.py
|
dthm4kaiako/config/__init__.py
|
"""Configuration for Django system."""
__version__ = "0.16.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.16.4"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
Increment version number to 0.16.4
|
Increment version number to 0.16.4
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
---
+++
@@ -1,6 +1,6 @@
"""Configuration for Django system."""
-__version__ = "0.16.3"
+__version__ = "0.16.4"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
|
db7d56453e09981c3a3d57deb9ad3460ac086857
|
apps/api/rfid/user.py
|
apps/api/rfid/user.py
|
# -*- coding: utf-8 -*-
import logging
from django.core.exceptions import PermissionDenied
from tastypie.resources import ModelResource, ALL
from tastypie.authorization import Authorization
from apps.authentication.models import OnlineUser as User
from apps.api.rfid.auth import RfidAuthentication
class UserResource(ModelResource):
class Meta(object):
queryset = User.objects.all()
resource_name = 'user'
fields = ['username', 'first_name', 'last_name', 'rfid', ]
allowed_update_fields = ['rfid']
allowed_methods = ['get']
detail_allowed_methods = ['get', 'patch']
authorization = Authorization()
authentication = RfidAuthentication()
filtering = {
"username": ALL,
"rfid": ALL,
}
def update_in_place(self, request, original_bundle, new_data):
"""
Override to restrict patching of user fields to those specified in allowed_update_fields
"""
if set(new_data.keys()) - set(self._meta.allowed_update_fields):
raise PermissionDenied(
'Kun oppdatering av %s er tillatt.' % ', '.join(self._meta.allowed_update_fields)
)
logging.getLogger(__name__).debug('User patched: %s' % repr(original_bundle))
return super(UserResource, self).update_in_place(request, original_bundle, new_data)
|
# -*- coding: utf-8 -*-
import logging
from django.core.exceptions import PermissionDenied
from tastypie.resources import ModelResource, ALL
from tastypie.authorization import Authorization
from apps.authentication.models import OnlineUser as User
from apps.api.rfid.auth import RfidAuthentication
class UserResource(ModelResource):
class Meta(object):
queryset = User.objects.all()
resource_name = 'user'
fields = ['username', 'first_name', 'last_name', 'rfid', ]
allowed_update_fields = ['rfid']
allowed_methods = ['get']
detail_allowed_methods = ['get', 'patch']
authorization = Authorization()
authentication = RfidAuthentication()
filtering = {
"username": ALL,
"rfid": ALL,
}
def update_in_place(self, request, original_bundle, new_data):
"""
Override to restrict patching of user fields to those specified in allowed_update_fields
"""
if set(new_data.keys()) - set(self._meta.allowed_update_fields):
raise PermissionDenied(
'Kun oppdatering av %s er tillatt.' % ', '.join(self._meta.allowed_update_fields)
)
# logging.getLogger(__name__).debug('User patched: %s' % unicode(original_bundle))
return super(UserResource, self).update_in_place(request, original_bundle, new_data)
|
Comment out logger until resolved properly
|
Comment out logger until resolved properly
|
Python
|
mit
|
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
|
---
+++
@@ -35,6 +35,6 @@
'Kun oppdatering av %s er tillatt.' % ', '.join(self._meta.allowed_update_fields)
)
- logging.getLogger(__name__).debug('User patched: %s' % repr(original_bundle))
+ # logging.getLogger(__name__).debug('User patched: %s' % unicode(original_bundle))
return super(UserResource, self).update_in_place(request, original_bundle, new_data)
|
f8aa722b9b56ca543f73a40f22fd682a1c71fb4c
|
clowder_server/management/commands/send_alerts.py
|
clowder_server/management/commands/send_alerts.py
|
import datetime
from django.core.management.base import BaseCommand, CommandError
from clowder_server.emailer import send_alert
from clowder_server.models import Alert
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
alert.notify_at = None
alert.save()
|
import datetime
from django.core.management.base import BaseCommand, CommandError
from clowder_account.models import ClowderUser
from clowder_server.emailer import send_alert
from clowder_server.models import Alert, Ping
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
# delete old pings
for user in ClowderUser.objects.all():
pings = Ping.objects.filter(user=user)[:500]
pings = list(pings) # forces database hit
Ping.objects.exclude(pk__in=pings).delete()
# send alerts
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
alert.notify_at = None
alert.save()
|
Delete old unused pings from users
|
Delete old unused pings from users
|
Python
|
agpl-3.0
|
keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,framewr/clowder_server,keithhackbarth/clowder_server,framewr/clowder_server,framewr/clowder_server
|
---
+++
@@ -2,13 +2,22 @@
from django.core.management.base import BaseCommand, CommandError
+from clowder_account.models import ClowderUser
from clowder_server.emailer import send_alert
-from clowder_server.models import Alert
+from clowder_server.models import Alert, Ping
class Command(BaseCommand):
help = 'Checks and sends alerts'
def handle(self, *args, **options):
+
+ # delete old pings
+ for user in ClowderUser.objects.all():
+ pings = Ping.objects.filter(user=user)[:500]
+ pings = list(pings) # forces database hit
+ Ping.objects.exclude(pk__in=pings).delete()
+
+ # send alerts
alerts = Alert.objects.filter(notify_at__lte=datetime.datetime.now)
for alert in alerts:
send_alert(request.user, alert.name)
|
10d09367111d610e82344e9616aab98815bf9397
|
capture_chessboard.py
|
capture_chessboard.py
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Capture calibration chessboard
#
# External dependencies
import time
import cv2
import Calibration
# Calibration pattern size
pattern_size = ( 9, 6 )
# Get the camera
camera = cv2.VideoCapture( 1 )
# Acquisition loop
while( True ) :
# Capture image-by-image
_, image = camera.read()
# Display the chessboard on the image
chessboard = Calibration.PreviewChessboard( image, pattern_size )
# Display the resulting image
cv2.imshow( 'USB Camera', chessboard )
# Keyboard interruption
key = cv2.waitKey( 1 ) & 0xFF
# Escape : quit the application
if key == 27 : break
# Space : save the image
elif key == 32 :
current_time = time.strftime( '%Y%m%d_%H%M%S' )
print( 'Save image {} to disk...'.format( current_time ) )
cv2.imwrite( 'image-{}.png'.format( current_time ), image )
# Release the camera
camera.release()
# Close OpenCV windows
cv2.destroyAllWindows()
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Capture calibration chessboard
#
# External dependencies
import time
import cv2
import numpy as np
import Calibration
# Calibration pattern size
pattern_size = ( 9, 6 )
# Get the camera
camera = cv2.VideoCapture( 0 )
# Acquisition loop
while( True ) :
# Capture image-by-image
_, image = camera.read()
# Copy the image for display
chessboard = np.copy( image )
# Display the chessboard on the image
Calibration.PreviewChessboard( chessboard, pattern_size )
# Display the resulting image
cv2.imshow( 'USB Camera', chessboard )
# Keyboard interruption
key = cv2.waitKey( 1 ) & 0xFF
# Escape : quit the application
if key == 27 : break
# Space : save the image
elif key == 32 :
current_time = time.strftime( '%Y%m%d_%H%M%S' )
print( 'Save image {} to disk...'.format( current_time ) )
cv2.imwrite( 'image-{}.png'.format( current_time ), image )
# Release the camera
camera.release()
# Close OpenCV windows
cv2.destroyAllWindows()
|
Change camera index, and fix the chessboard preview.
|
Change camera index, and fix the chessboard preview.
|
Python
|
mit
|
microy/RobotVision,microy/RobotVision
|
---
+++
@@ -8,18 +8,21 @@
# External dependencies
import time
import cv2
+import numpy as np
import Calibration
# Calibration pattern size
pattern_size = ( 9, 6 )
# Get the camera
-camera = cv2.VideoCapture( 1 )
+camera = cv2.VideoCapture( 0 )
# Acquisition loop
while( True ) :
# Capture image-by-image
_, image = camera.read()
+ # Copy the image for display
+ chessboard = np.copy( image )
# Display the chessboard on the image
- chessboard = Calibration.PreviewChessboard( image, pattern_size )
+ Calibration.PreviewChessboard( chessboard, pattern_size )
# Display the resulting image
cv2.imshow( 'USB Camera', chessboard )
# Keyboard interruption
|
e7825da0f8467717aac9857bbc046d946aa2ce66
|
script/lib/config.py
|
script/lib/config.py
|
#!/usr/bin/env python
import platform
import sys
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '56984fa0e4c3c745652510f342c0fb2724d846c2'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
|
#!/usr/bin/env python
import platform
import sys
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = '2dfdf169b582e3f051e1fec3dd7df2bc179e1aa6'
ARCH = {
'cygwin': '32bit',
'darwin': '64bit',
'linux2': platform.architecture()[0],
'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
'32bit': 'ia32',
'64bit': 'x64',
}[ARCH]
TARGET_PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
|
Upgrade libchromiumcontent to discard iframe security settings
|
Upgrade libchromiumcontent to discard iframe security settings
|
Python
|
mit
|
astoilkov/electron,tylergibson/electron,yalexx/electron,bruce/electron,stevekinney/electron,kikong/electron,beni55/electron,nagyistoce/electron-atom-shell,GoooIce/electron,xiruibing/electron,timruffles/electron,thomsonreuters/electron,thomsonreuters/electron,gabrielPeart/electron,stevemao/electron,baiwyc119/electron,jcblw/electron,vipulroxx/electron,fomojola/electron,Ivshti/electron,stevekinney/electron,destan/electron,baiwyc119/electron,jlord/electron,thingsinjars/electron,jannishuebl/electron,tinydew4/electron,LadyNaggaga/electron,bobwol/electron,robinvandernoord/electron,RobertJGabriel/electron,bpasero/electron,d-salas/electron,DivyaKMenon/electron,deed02392/electron,lzpfmh/electron,carsonmcdonald/electron,kostia/electron,Jacobichou/electron,shaundunne/electron,abhishekgahlot/electron,shennushi/electron,eric-seekas/electron,brenca/electron,dongjoon-hyun/electron,joneit/electron,howmuchcomputer/electron,rreimann/electron,soulteary/electron,zhakui/electron,nekuz0r/electron,leolujuyi/electron,xfstudio/electron,kikong/electron,egoist/electron,pirafrank/electron,thingsinjars/electron,JesselJohn/electron,brenca/electron,eriser/electron,the-ress/electron,MaxGraey/electron,egoist/electron,stevekinney/electron,Rokt33r/electron,mhkeller/electron,bwiggs/electron,jhen0409/electron,systembugtj/electron,yan-foto/electron,medixdev/electron,oiledCode/electron,twolfson/electron,jlhbaseball15/electron,saronwei/electron,eriser/electron,davazp/electron,farmisen/electron,pandoraui/electron,John-Lin/electron,bitemyapp/electron,aaron-goshine/electron,rreimann/electron,jhen0409/electron,minggo/electron,noikiy/electron,mhkeller/electron,chrisswk/electron,roadev/electron,etiktin/electron,electron/electron,electron/electron,thingsinjars/electron,subblue/electron,davazp/electron,bobwol/electron,darwin/electron,bruce/electron,miniak/electron,faizalpribadi/electron,mattdesl/electron,felixrieseberg/electron,howmuchcomputer/electron,sky7sea/electron,edulan/electron,icattlecoder/electron,GoooIce
/electron,faizalpribadi/electron,natgolov/electron,maxogden/atom-shell,jonatasfreitasv/electron,mattotodd/electron,jtburke/electron,Evercoder/electron,jonatasfreitasv/electron,shaundunne/electron,brave/muon,mjaniszew/electron,Faiz7412/electron,mubassirhayat/electron,oiledCode/electron,vipulroxx/electron,rsvip/electron,tinydew4/electron,vipulroxx/electron,deed02392/electron,brave/muon,RobertJGabriel/electron,jaanus/electron,benweissmann/electron,kikong/electron,gstack/infinium-shell,rsvip/electron,shockone/electron,xfstudio/electron,adcentury/electron,takashi/electron,yalexx/electron,simongregory/electron,gabriel/electron,nicobot/electron,egoist/electron,wolfflow/electron,joneit/electron,eric-seekas/electron,etiktin/electron,xfstudio/electron,gerhardberger/electron,lzpfmh/electron,gabriel/electron,leftstick/electron,Neron-X5/electron,adcentury/electron,Ivshti/electron,tomashanacek/electron,mirrh/electron,jannishuebl/electron,davazp/electron,fomojola/electron,jannishuebl/electron,aaron-goshine/electron,cqqccqc/electron,leethomas/electron,joneit/electron,joaomoreno/atom-shell,BionicClick/electron,jtburke/electron,destan/electron,joneit/electron,rajatsingla28/electron,digideskio/electron,egoist/electron,ianscrivener/electron,mrwizard82d1/electron,vaginessa/electron,fffej/electron,thomsonreuters/electron,John-Lin/electron,zhakui/electron,vHanda/electron,gbn972/electron,DivyaKMenon/electron,lrlna/electron,cqqccqc/electron,vaginessa/electron,preco21/electron,kostia/electron,synaptek/electron,thompsonemerson/electron,iftekeriba/electron,yalexx/electron,jacksondc/electron,bobwol/electron,simongregory/electron,arturts/electron,setzer777/electron,bitemyapp/electron,micalan/electron,tinydew4/electron,aichingm/electron,edulan/electron,trankmichael/electron,posix4e/electron,bbondy/electron,preco21/electron,trigrass2/electron,mrwizard82d1/electron,eric-seekas/electron,farmisen/electron,nicobot/electron,vaginessa/electron,kcrt/electron,takashi/electron,xiruibing/electron,jlhbasebal
l15/electron,pombredanne/electron,mjaniszew/electron,chriskdon/electron,aecca/electron,tomashanacek/electron,pandoraui/electron,abhishekgahlot/electron,deepak1556/atom-shell,anko/electron,dahal/electron,systembugtj/electron,Gerhut/electron,Evercoder/electron,simonfork/electron,ervinb/electron,stevekinney/electron,anko/electron,aaron-goshine/electron,kazupon/electron,fomojola/electron,eric-seekas/electron,jonatasfreitasv/electron,seanchas116/electron,minggo/electron,smczk/electron,gabrielPeart/electron,robinvandernoord/electron,lzpfmh/electron,evgenyzinoviev/electron,christian-bromann/electron,brave/muon,bitemyapp/electron,shennushi/electron,kikong/electron,subblue/electron,nekuz0r/electron,medixdev/electron,SufianHassan/electron,matiasinsaurralde/electron,GoooIce/electron,Jacobichou/electron,jtburke/electron,mirrh/electron,tonyganch/electron,meowlab/electron,JussMee15/electron,brave/electron,voidbridge/electron,jaanus/electron,icattlecoder/electron,trankmichael/electron,egoist/electron,jsutcodes/electron,arusakov/electron,joneit/electron,oiledCode/electron,kazupon/electron,fomojola/electron,bwiggs/electron,cos2004/electron,fabien-d/electron,Andrey-Pavlov/electron,cos2004/electron,pirafrank/electron,soulteary/electron,chriskdon/electron,thomsonreuters/electron,jiaz/electron,kostia/electron,mjaniszew/electron,ianscrivener/electron,adamjgray/electron,synaptek/electron,howmuchcomputer/electron,anko/electron,jaanus/electron,faizalpribadi/electron,kokdemo/electron,beni55/electron,jannishuebl/electron,shiftkey/electron,electron/electron,jiaz/electron,hokein/atom-shell,jjz/electron,BionicClick/electron,d-salas/electron,IonicaBizauKitchen/electron,leolujuyi/electron,Zagorakiss/electron,leolujuyi/electron,LadyNaggaga/electron,bright-sparks/electron,mhkeller/electron,seanchas116/electron,darwin/electron,jlhbaseball15/electron,maxogden/atom-shell,nekuz0r/electron,bitemyapp/electron,gerhardberger/electron,tylergibson/electron,ianscrivener/electron,egoist/electron,webmechanicx/el
ectron,bpasero/electron,sshiting/electron,gabriel/electron,setzer777/electron,aichingm/electron,oiledCode/electron,aecca/electron,jlhbaseball15/electron,posix4e/electron,noikiy/electron,JussMee15/electron,renaesop/electron,renaesop/electron,vipulroxx/electron,electron/electron,cqqccqc/electron,adamjgray/electron,the-ress/electron,mattdesl/electron,joaomoreno/atom-shell,matiasinsaurralde/electron,rhencke/electron,fireball-x/atom-shell,leftstick/electron,jtburke/electron,simongregory/electron,icattlecoder/electron,vHanda/electron,beni55/electron,tinydew4/electron,tomashanacek/electron,IonicaBizauKitchen/electron,chrisswk/electron,shiftkey/electron,seanchas116/electron,vipulroxx/electron,hokein/atom-shell,MaxWhere/electron,kenmozi/electron,DivyaKMenon/electron,simongregory/electron,shaundunne/electron,howmuchcomputer/electron,minggo/electron,iftekeriba/electron,meowlab/electron,farmisen/electron,ankitaggarwal011/electron,bbondy/electron,ankitaggarwal011/electron,tincan24/electron,aaron-goshine/electron,icattlecoder/electron,d-salas/electron,d-salas/electron,thingsinjars/electron,yan-foto/electron,oiledCode/electron,leethomas/electron,iftekeriba/electron,arturts/electron,wan-qy/electron,aichingm/electron,Evercoder/electron,edulan/electron,Floato/electron,miniak/electron,jacksondc/electron,trankmichael/electron,evgenyzinoviev/electron,gabriel/electron,jlhbaseball15/electron,abhishekgahlot/electron,setzer777/electron,Gerhut/electron,twolfson/electron,kazupon/electron,bpasero/electron,kokdemo/electron,RIAEvangelist/electron,biblerule/UMCTelnetHub,Andrey-Pavlov/electron,trigrass2/electron,astoilkov/electron,deepak1556/atom-shell,simongregory/electron,subblue/electron,soulteary/electron,SufianHassan/electron,pandoraui/electron,jsutcodes/electron,gbn972/electron,ankitaggarwal011/electron,fritx/electron,edulan/electron,rajatsingla28/electron,howmuchcomputer/electron,Gerhut/electron,trigrass2/electron,destan/electron,yan-foto/electron,kenmozi/electron,arusakov/electron,vHanda/e
lectron,takashi/electron,bwiggs/electron,abhishekgahlot/electron,voidbridge/electron,smczk/electron,wan-qy/electron,trankmichael/electron,pirafrank/electron,jsutcodes/electron,rajatsingla28/electron,shiftkey/electron,shiftkey/electron,wolfflow/electron,neutrous/electron,Jonekee/electron,sky7sea/electron,JesselJohn/electron,meowlab/electron,jcblw/electron,michaelchiche/electron,Rokt33r/electron,zhakui/electron,systembugtj/electron,Ivshti/electron,trigrass2/electron,deed02392/electron,rhencke/electron,synaptek/electron,shockone/electron,pirafrank/electron,ianscrivener/electron,beni55/electron,kokdemo/electron,arturts/electron,natgolov/electron,adcentury/electron,mubassirhayat/electron,gerhardberger/electron,digideskio/electron,baiwyc119/electron,evgenyzinoviev/electron,eric-seekas/electron,sircharleswatson/electron,mjaniszew/electron,maxogden/atom-shell,subblue/electron,thompsonemerson/electron,aaron-goshine/electron,coderhaoxin/electron,Faiz7412/electron,chriskdon/electron,webmechanicx/electron,renaesop/electron,bwiggs/electron,mattdesl/electron,wolfflow/electron,neutrous/electron,gabrielPeart/electron,John-Lin/electron,Zagorakiss/electron,timruffles/electron,aecca/electron,mirrh/electron,eriser/electron,greyhwndz/electron,wan-qy/electron,renaesop/electron,faizalpribadi/electron,brave/electron,tylergibson/electron,rhencke/electron,stevekinney/electron,jjz/electron,cos2004/electron,darwin/electron,medixdev/electron,shaundunne/electron,jsutcodes/electron,GoooIce/electron,gabriel/electron,IonicaBizauKitchen/electron,mrwizard82d1/electron,kenmozi/electron,howmuchcomputer/electron,Zagorakiss/electron,systembugtj/electron,saronwei/electron,rajatsingla28/electron,greyhwndz/electron,dkfiresky/electron,GoooIce/electron,voidbridge/electron,neutrous/electron,mattotodd/electron,cqqccqc/electron,tincan24/electron,Evercoder/electron,Neron-X5/electron,MaxGraey/electron,pombredanne/electron,stevemao/electron,bpasero/electron,minggo/electron,beni55/electron,the-ress/electron,nagyisto
ce/electron-atom-shell,voidbridge/electron,christian-bromann/electron,eriser/electron,sircharleswatson/electron,the-ress/electron,timruffles/electron,noikiy/electron,farmisen/electron,aecca/electron,brave/electron,xiruibing/electron,BionicClick/electron,gamedevsam/electron,jjz/electron,kikong/electron,dahal/electron,adamjgray/electron,coderhaoxin/electron,kcrt/electron,Evercoder/electron,simonfork/electron,kcrt/electron,fireball-x/atom-shell,ankitaggarwal011/electron,Jonekee/electron,gabrielPeart/electron,beni55/electron,posix4e/electron,faizalpribadi/electron,deed02392/electron,christian-bromann/electron,thompsonemerson/electron,deed02392/electron,tinydew4/electron,wolfflow/electron,maxogden/atom-shell,jaanus/electron,deepak1556/atom-shell,pirafrank/electron,icattlecoder/electron,micalan/electron,xfstudio/electron,timruffles/electron,bruce/electron,meowlab/electron,adamjgray/electron,Jacobichou/electron,IonicaBizauKitchen/electron,neutrous/electron,tonyganch/electron,BionicClick/electron,mhkeller/electron,matiasinsaurralde/electron,lzpfmh/electron,mattotodd/electron,bright-sparks/electron,the-ress/electron,icattlecoder/electron,jannishuebl/electron,jjz/electron,rprichard/electron,brave/electron,shockone/electron,oiledCode/electron,jjz/electron,bitemyapp/electron,yalexx/electron,subblue/electron,the-ress/electron,matiasinsaurralde/electron,carsonmcdonald/electron,anko/electron,brenca/electron,jacksondc/electron,mrwizard82d1/electron,jonatasfreitasv/electron,etiktin/electron,Ivshti/electron,chrisswk/electron,biblerule/UMCTelnetHub,webmechanicx/electron,rajatsingla28/electron,vHanda/electron,bright-sparks/electron,kostia/electron,saronwei/electron,mrwizard82d1/electron,BionicClick/electron,digideskio/electron,bbondy/electron,thomsonreuters/electron,Neron-X5/electron,hokein/atom-shell,tincan24/electron,gabrielPeart/electron,lrlna/electron,biblerule/UMCTelnetHub,pombredanne/electron,etiktin/electron,mattotodd/electron,tincan24/electron,trigrass2/electron,jcblw/electron,
aaron-goshine/electron,jcblw/electron,adcentury/electron,lrlna/electron,fomojola/electron,robinvandernoord/electron,nicholasess/electron,shiftkey/electron,MaxGraey/electron,fffej/electron,cos2004/electron,joneit/electron,gbn972/electron,Jonekee/electron,wan-qy/electron,sshiting/electron,jsutcodes/electron,evgenyzinoviev/electron,vaginessa/electron,bitemyapp/electron,LadyNaggaga/electron,Andrey-Pavlov/electron,neutrous/electron,carsonmcdonald/electron,baiwyc119/electron,carsonmcdonald/electron,jaanus/electron,DivyaKMenon/electron,BionicClick/electron,yan-foto/electron,MaxWhere/electron,simonfork/electron,gbn972/electron,jlhbaseball15/electron,yalexx/electron,mirrh/electron,RIAEvangelist/electron,dkfiresky/electron,rajatsingla28/electron,tincan24/electron,leethomas/electron,Faiz7412/electron,deepak1556/atom-shell,robinvandernoord/electron,bpasero/electron,subblue/electron,gstack/infinium-shell,twolfson/electron,edulan/electron,gerhardberger/electron,rsvip/electron,bpasero/electron,coderhaoxin/electron,smczk/electron,webmechanicx/electron,pirafrank/electron,biblerule/UMCTelnetHub,MaxWhere/electron,aliib/electron,soulteary/electron,Andrey-Pavlov/electron,ankitaggarwal011/electron,anko/electron,mattotodd/electron,wan-qy/electron,miniak/electron,pombredanne/electron,tonyganch/electron,thompsonemerson/electron,jiaz/electron,rreimann/electron,leftstick/electron,nicobot/electron,roadev/electron,greyhwndz/electron,setzer777/electron,fritx/electron,faizalpribadi/electron,twolfson/electron,Jonekee/electron,sshiting/electron,jacksondc/electron,fireball-x/atom-shell,simonfork/electron,arturts/electron,dahal/electron,greyhwndz/electron,takashi/electron,jiaz/electron,zhakui/electron,chriskdon/electron,ianscrivener/electron,mirrh/electron,micalan/electron,joaomoreno/atom-shell,arusakov/electron,lrlna/electron,bbondy/electron,micalan/electron,xfstudio/electron,fabien-d/electron,jtburke/electron,kcrt/electron,wolfflow/electron,evgenyzinoviev/electron,shennushi/electron,bright-sparks/e
lectron,bwiggs/electron,sky7sea/electron,sshiting/electron,preco21/electron,zhakui/electron,christian-bromann/electron,mubassirhayat/electron,jlord/electron,sky7sea/electron,chriskdon/electron,stevemao/electron,roadev/electron,fireball-x/atom-shell,pandoraui/electron,noikiy/electron,michaelchiche/electron,gerhardberger/electron,jhen0409/electron,dahal/electron,JesselJohn/electron,aecca/electron,greyhwndz/electron,Floato/electron,lzpfmh/electron,Jonekee/electron,michaelchiche/electron,JesselJohn/electron,nicholasess/electron,leftstick/electron,miniak/electron,carsonmcdonald/electron,eric-seekas/electron,jtburke/electron,jjz/electron,leethomas/electron,Jacobichou/electron,Gerhut/electron,tomashanacek/electron,bright-sparks/electron,benweissmann/electron,fffej/electron,meowlab/electron,Evercoder/electron,deed02392/electron,arturts/electron,darwin/electron,kazupon/electron,mhkeller/electron,sircharleswatson/electron,tonyganch/electron,preco21/electron,aliib/electron,dongjoon-hyun/electron,nicholasess/electron,digideskio/electron,posix4e/electron,jacksondc/electron,adcentury/electron,electron/electron,dahal/electron,shennushi/electron,arusakov/electron,MaxWhere/electron,dkfiresky/electron,trankmichael/electron,mubassirhayat/electron,brenca/electron,robinvandernoord/electron,gamedevsam/electron,destan/electron,renaesop/electron,Neron-X5/electron,bbondy/electron,gbn972/electron,xfstudio/electron,brave/muon,IonicaBizauKitchen/electron,gamedevsam/electron,jsutcodes/electron,twolfson/electron,ankitaggarwal011/electron,xiruibing/electron,Rokt33r/electron,astoilkov/electron,kostia/electron,gamedevsam/electron,leethomas/electron,eriser/electron,RIAEvangelist/electron,xiruibing/electron,davazp/electron,vHanda/electron,bobwol/electron,shiftkey/electron,felixrieseberg/electron,mjaniszew/electron,noikiy/electron,takashi/electron,joaomoreno/atom-shell,seanchas116/electron,adcentury/electron,fireball-x/atom-shell,pombredanne/electron,gerhardberger/electron,gstack/infinium-shell,adamjg
ray/electron,shockone/electron,tonyganch/electron,aliib/electron,arusakov/electron,micalan/electron,tylergibson/electron,Jacobichou/electron,Floato/electron,Faiz7412/electron,tinydew4/electron,thingsinjars/electron,arusakov/electron,jlord/electron,MaxGraey/electron,Rokt33r/electron,shockone/electron,SufianHassan/electron,brave/electron,LadyNaggaga/electron,leethomas/electron,tomashanacek/electron,systembugtj/electron,dkfiresky/electron,joaomoreno/atom-shell,rhencke/electron,aichingm/electron,Ivshti/electron,astoilkov/electron,MaxWhere/electron,destan/electron,John-Lin/electron,chrisswk/electron,preco21/electron,soulteary/electron,d-salas/electron,astoilkov/electron,nicholasess/electron,vHanda/electron,jaanus/electron,RobertJGabriel/electron,cos2004/electron,vipulroxx/electron,iftekeriba/electron,wolfflow/electron,felixrieseberg/electron,benweissmann/electron,hokein/atom-shell,brenca/electron,evgenyzinoviev/electron,trankmichael/electron,thomsonreuters/electron,JesselJohn/electron,tylergibson/electron,nicholasess/electron,bruce/electron,systembugtj/electron,saronwei/electron,IonicaBizauKitchen/electron,baiwyc119/electron,JussMee15/electron,coderhaoxin/electron,smczk/electron,aichingm/electron,jlord/electron,dahal/electron,yan-foto/electron,miniak/electron,fritx/electron,natgolov/electron,mirrh/electron,iftekeriba/electron,dongjoon-hyun/electron,mrwizard82d1/electron,aliib/electron,electron/electron,leolujuyi/electron,leolujuyi/electron,the-ress/electron,leftstick/electron,JussMee15/electron,Gerhut/electron,DivyaKMenon/electron,John-Lin/electron,RobertJGabriel/electron,twolfson/electron,ianscrivener/electron,mattdesl/electron,bobwol/electron,michaelchiche/electron,anko/electron,bwiggs/electron,roadev/electron,MaxWhere/electron,setzer777/electron,ervinb/electron,DivyaKMenon/electron,bruce/electron,rhencke/electron,simonfork/electron,shennushi/electron,RIAEvangelist/electron,jhen0409/electron,robinvandernoord/electron,posix4e/electron,aliib/electron,minggo/electron,nico
bot/electron,sircharleswatson/electron,LadyNaggaga/electron,stevemao/electron,minggo/electron,synaptek/electron,JussMee15/electron,gamedevsam/electron,JesselJohn/electron,felixrieseberg/electron,vaginessa/electron,gstack/infinium-shell,mattdesl/electron,bbondy/electron,stevemao/electron,fffej/electron,coderhaoxin/electron,rhencke/electron,simongregory/electron,SufianHassan/electron,coderhaoxin/electron,medixdev/electron,neutrous/electron,roadev/electron,sircharleswatson/electron,benweissmann/electron,matiasinsaurralde/electron,fabien-d/electron,gamedevsam/electron,Andrey-Pavlov/electron,jacksondc/electron,nicholasess/electron,brave/muon,astoilkov/electron,Rokt33r/electron,kokdemo/electron,greyhwndz/electron,ervinb/electron,MaxGraey/electron,nekuz0r/electron,trigrass2/electron,noikiy/electron,biblerule/UMCTelnetHub,edulan/electron,eriser/electron,jannishuebl/electron,fffej/electron,aliib/electron,jhen0409/electron,shockone/electron,farmisen/electron,seanchas116/electron,gstack/infinium-shell,rreimann/electron,Jacobichou/electron,mattotodd/electron,synaptek/electron,bright-sparks/electron,natgolov/electron,nagyistoce/electron-atom-shell,leolujuyi/electron,jiaz/electron,pandoraui/electron,nagyistoce/electron-atom-shell,tonyganch/electron,jhen0409/electron,takashi/electron,fritx/electron,adamjgray/electron,davazp/electron,shaundunne/electron,rprichard/electron,fritx/electron,christian-bromann/electron,miniak/electron,jcblw/electron,tylergibson/electron,brave/electron,cqqccqc/electron,saronwei/electron,kenmozi/electron,christian-bromann/electron,abhishekgahlot/electron,meowlab/electron,Floato/electron,rsvip/electron,Gerhut/electron,lrlna/electron,stevekinney/electron,wan-qy/electron,rprichard/electron,RobertJGabriel/electron,GoooIce/electron,Zagorakiss/electron,benweissmann/electron,matiasinsaurralde/electron,jlord/electron,Jonekee/electron,thompsonemerson/electron,nicobot/electron,dongjoon-hyun/electron,bruce/electron,chriskdon/electron,voidbridge/electron,leftstick/ele
ctron,dongjoon-hyun/electron,jonatasfreitasv/electron,nagyistoce/electron-atom-shell,fabien-d/electron,vaginessa/electron,RobertJGabriel/electron,brave/muon,seanchas116/electron,micalan/electron,mattdesl/electron,kazupon/electron,yalexx/electron,rreimann/electron,gerhardberger/electron,pandoraui/electron,mhkeller/electron,pombredanne/electron,ervinb/electron,kcrt/electron,nekuz0r/electron,sshiting/electron,aichingm/electron,rsvip/electron,voidbridge/electron,etiktin/electron,kenmozi/electron,gbn972/electron,fabien-d/electron,SufianHassan/electron,thingsinjars/electron,sshiting/electron,tomashanacek/electron,zhakui/electron,LadyNaggaga/electron,shennushi/electron,brenca/electron,dongjoon-hyun/electron,darwin/electron,hokein/atom-shell,renaesop/electron,lrlna/electron,preco21/electron,timruffles/electron,kcrt/electron,SufianHassan/electron,deepak1556/atom-shell,rreimann/electron,dkfiresky/electron,roadev/electron,sky7sea/electron,Zagorakiss/electron,gabriel/electron,sircharleswatson/electron,kenmozi/electron,Faiz7412/electron,rprichard/electron,thompsonemerson/electron,d-salas/electron,soulteary/electron,farmisen/electron,jonatasfreitasv/electron,webmechanicx/electron,jcblw/electron,felixrieseberg/electron,carsonmcdonald/electron,dkfiresky/electron,nicobot/electron,tincan24/electron,aecca/electron,digideskio/electron,Neron-X5/electron,digideskio/electron,ervinb/electron,natgolov/electron,michaelchiche/electron,natgolov/electron,joaomoreno/atom-shell,destan/electron,nekuz0r/electron,baiwyc119/electron,etiktin/electron,RIAEvangelist/electron,RIAEvangelist/electron,cqqccqc/electron,yan-foto/electron,saronwei/electron,benweissmann/electron,kokdemo/electron,shaundunne/electron,abhishekgahlot/electron,davazp/electron,fffej/electron,bobwol/electron,Neron-X5/electron,setzer777/electron,medixdev/electron,chrisswk/electron,iftekeriba/electron,kazupon/electron,xiruibing/electron,cos2004/electron,jiaz/electron,ervinb/electron,Floato/electron,bpasero/electron,felixrieseberg/electr
on,JussMee15/electron,stevemao/electron,kokdemo/electron,sky7sea/electron,smczk/electron,smczk/electron,maxogden/atom-shell,Rokt33r/electron,simonfork/electron,mjaniszew/electron,webmechanicx/electron,arturts/electron,Zagorakiss/electron,fomojola/electron,fritx/electron,biblerule/UMCTelnetHub,electron/electron,gabrielPeart/electron,kostia/electron,medixdev/electron,John-Lin/electron,posix4e/electron,Andrey-Pavlov/electron,Floato/electron,lzpfmh/electron,mubassirhayat/electron,synaptek/electron,michaelchiche/electron
|
---
+++
@@ -4,7 +4,7 @@
import sys
BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
-LIBCHROMIUMCONTENT_COMMIT = '56984fa0e4c3c745652510f342c0fb2724d846c2'
+LIBCHROMIUMCONTENT_COMMIT = '2dfdf169b582e3f051e1fec3dd7df2bc179e1aa6'
ARCH = {
'cygwin': '32bit',
|
6ad448568acc130118d382b29a2ea1930f738a3f
|
tohu/derived_generators_NEW.py
|
tohu/derived_generators_NEW.py
|
import logging
from operator import attrgetter
from .base_NEW import TohuUltraBaseGenerator
__all__ = ['ExtractAttribute']
logger = logging.getLogger('tohu')
class ExtractAttribute(TohuUltraBaseGenerator):
"""
Generator which produces items that are attributes extracted from
the items produced by a different generator.
"""
def __init__(self, g, attr_name):
logger.debug(f"Extracting attribute '{attr_name}' from parent={g}")
self.parent = g
self.gen = g.clone()
self.attr_name = attr_name
self.attrgetter = attrgetter(attr_name)
def __repr__(self):
return f"<ExtractAttribute '{self.attr_name}' from {self.parent} >"
def spawn(self, dependency_mapping):
logger.warning(f'ExtractAttribute.spawn(): dependency_mapping={dependency_mapping}')
raise NotImplementedError()
def __next__(self):
return self.attrgetter(next(self.gen))
|
import logging
from operator import attrgetter
from .base_NEW import TohuUltraBaseGenerator
__all__ = ['ExtractAttribute']
logger = logging.getLogger('tohu')
class ExtractAttribute(TohuUltraBaseGenerator):
"""
Generator which produces items that are attributes extracted from
the items produced by a different generator.
"""
def __init__(self, g, attr_name):
logger.debug(f"Extracting attribute '{attr_name}' from parent={g}")
self.parent = g
self.gen = g.clone()
self.attr_name = attr_name
self.attrgetter = attrgetter(attr_name)
def __repr__(self):
return f"<ExtractAttribute '{self.attr_name}' from {self.parent} >"
def spawn(self, dependency_mapping):
logger.warning(f'ExtractAttribute.spawn(): dependency_mapping={dependency_mapping}')
raise NotImplementedError()
def __next__(self):
return self.attrgetter(next(self.gen))
def reset(self, seed):
logger.debug(f"Ignoring explicit reset() on derived generator: {self}")
def reset_clone(self, seed):
logger.warning("TODO: rename method reset_clone() to reset_dependent_generator() because ExtractAttribute is not a direct clone")
self.gen.reset(seed)
|
Add reset methods to ExtractAttribute
|
Add reset methods to ExtractAttribute
|
Python
|
mit
|
maxalbert/tohu
|
---
+++
@@ -29,3 +29,10 @@
def __next__(self):
return self.attrgetter(next(self.gen))
+
+ def reset(self, seed):
+ logger.debug(f"Ignoring explicit reset() on derived generator: {self}")
+
+ def reset_clone(self, seed):
+ logger.warning("TODO: rename method reset_clone() to reset_dependent_generator() because ExtractAttribute is not a direct clone")
+ self.gen.reset(seed)
|
21f152589550c1c168a856798690b9cf957653db
|
akanda/horizon/routers/views.py
|
akanda/horizon/routers/views.py
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Right now we are listing, for both normal and
# admin users, all the ports on the user's networks
# the router is associated with. We may want in the
# future show the ports on the mgt and the external
# networks for the admin users.
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] == 'network:router_interface']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon import exceptions
from openstack_dashboard import api
def get_interfaces_data(self):
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
# Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
'Port list can not be retrieved for router ID %s' %
self.kwargs.get('router_id')
)
exceptions.handle(self.request, msg)
for p in ports:
p.set_id_as_name_if_empty()
return ports
|
Modify the interfaces listing view to filter only the port on the mgt network
|
Modify the interfaces listing view to filter only the port on
the mgt network
DHC-1512
Change-Id: If7e5aebf7cfd7e87df0dea8cd749764c142f1676
Signed-off-by: Rosario Di Somma <73b2fe5f91895aea2b4d0e8942a5edf9f18fa897@dreamhost.com>
|
Python
|
apache-2.0
|
dreamhost/akanda-horizon,dreamhost/akanda-horizon
|
---
+++
@@ -8,13 +8,9 @@
try:
router_id = self.kwargs['router_id']
router = api.quantum.router_get(self.request, router_id)
- # Note(rods): Right now we are listing, for both normal and
- # admin users, all the ports on the user's networks
- # the router is associated with. We may want in the
- # future show the ports on the mgt and the external
- # networks for the admin users.
+ # Note(rods): Filter off the port on the mgt network
ports = [api.quantum.Port(p) for p in router.ports
- if p['device_owner'] == 'network:router_interface']
+ if p['device_owner'] != 'network:router_management']
except Exception:
ports = []
msg = _(
|
c1e17f9501fb9afc69f9fba288fa9e4cfac262e2
|
tviit/models.py
|
tviit/models.py
|
from __future__ import unicode_literals
from django.conf import settings
import uuid
from django.db import models
class Tviit(models.Model):
uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False)
sender = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
verbose_name="Tviit sender",
)
content = models.TextField(max_length=160)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
reply = models.ForeignKey("self", null=True, blank=True)
class Meta:
ordering = ('created',)
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import models
from django.utils.deconstruct import deconstructible
from django.dispatch import receiver
from django.forms import ModelForm
import uuid, os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@deconstructible
class PathAndRename(object):
def __init__(self, sub_path):
self.path = sub_path
def __call__(self, instance, filename):
ext = filename.split('.')[-1]
# set filename as random string
filename = '{}.{}'.format(uuid.uuid4().hex, ext)
# return the whole path to the file
return os.path.join(self.path, filename)
path_and_rename = PathAndRename("attachments")
class Tviit(models.Model):
uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False)
sender = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
verbose_name="Tviit sender",
)
content = models.TextField(max_length=160)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
image = models.ImageField(upload_to=path_and_rename, null=True, blank=True)
reply = models.ForeignKey("self", null=True, blank=True)
class Meta:
ordering = ('created',)
def __str__(self):
return '%s - %s' % (self.created, self.sender.username)
class TviitForm(ModelForm):
class Meta:
model = Tviit
fields = ['content', 'image']
class EditTviitForm(ModelForm):
#attachments = MultiFileField(required=False, max_num=12, max_file_size=1024 * 1024 * 500)
class Meta:
model = Tviit
fields = ['content', 'image']
|
Add image into Tviit Model Add PathAndRename function to rename image path Add TviitForm
|
Add image into Tviit Model
Add PathAndRename function to rename image path
Add TviitForm
|
Python
|
mit
|
DeWaster/Tviserrys,DeWaster/Tviserrys
|
---
+++
@@ -1,11 +1,31 @@
from __future__ import unicode_literals
from django.conf import settings
-import uuid
+from django.db import models
+from django.utils.deconstruct import deconstructible
+from django.dispatch import receiver
+from django.forms import ModelForm
+import uuid, os
+
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+@deconstructible
+class PathAndRename(object):
+
+ def __init__(self, sub_path):
+ self.path = sub_path
+
+ def __call__(self, instance, filename):
+ ext = filename.split('.')[-1]
+ # set filename as random string
+ filename = '{}.{}'.format(uuid.uuid4().hex, ext)
+ # return the whole path to the file
+ return os.path.join(self.path, filename)
+
+path_and_rename = PathAndRename("attachments")
-from django.db import models
+class Tviit(models.Model):
-class Tviit(models.Model):
uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False)
sender = models.ForeignKey(
settings.AUTH_USER_MODEL,
@@ -15,8 +35,29 @@
content = models.TextField(max_length=160)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
+ image = models.ImageField(upload_to=path_and_rename, null=True, blank=True)
reply = models.ForeignKey("self", null=True, blank=True)
-
class Meta:
ordering = ('created',)
+
+ def __str__(self):
+ return '%s - %s' % (self.created, self.sender.username)
+
+
+
+
+
+class TviitForm(ModelForm):
+
+ class Meta:
+ model = Tviit
+ fields = ['content', 'image']
+
+
+class EditTviitForm(ModelForm):
+ #attachments = MultiFileField(required=False, max_num=12, max_file_size=1024 * 1024 * 500)
+
+ class Meta:
+ model = Tviit
+ fields = ['content', 'image']
|
030a786db1c0602125bfe4093c6a5709b0202858
|
app/hooks/views.py
|
app/hooks/views.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
from app import app, webhooks
@webhooks.hook(app.config.get('GITLAB_HOOK'), handler='gitlab')
class Gitlab:
def issue(self, data):
pass
def push(self, data):
pass
def tag_push(self, data):
pass
def merge_request(self, data):
pass
def commit_comment(self, data):
pass
def issue_comment(self, data):
pass
def merge_request_comment(self, data):
pass
def snippet_comment(self, data):
pass
|
from __future__ import absolute_import
from __future__ import unicode_literals
from app import app, webhooks
@webhooks.hook(
app.config.get('GITLAB_HOOK','/hooks/gitlab'),
handler='gitlab')
class Gitlab:
def issue(self, data):
pass
def push(self, data):
pass
def tag_push(self, data):
pass
def merge_request(self, data):
pass
def commit_comment(self, data):
pass
def issue_comment(self, data):
pass
def merge_request_comment(self, data):
pass
def snippet_comment(self, data):
pass
|
Add default hook url for gitlab
|
Add default hook url for gitlab
|
Python
|
apache-2.0
|
pipex/gitbot,pipex/gitbot,pipex/gitbot
|
---
+++
@@ -3,7 +3,9 @@
from app import app, webhooks
-@webhooks.hook(app.config.get('GITLAB_HOOK'), handler='gitlab')
+@webhooks.hook(
+ app.config.get('GITLAB_HOOK','/hooks/gitlab'),
+ handler='gitlab')
class Gitlab:
def issue(self, data):
pass
|
99580712595402cc84db3eed37e913b18cae1703
|
examples/marginal_ticks.py
|
examples/marginal_ticks.py
|
"""
Scatterplot with marginal ticks
===============================
_thumb: .68, .32
"""
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
sns.set(style="white", color_codes=True)
# Generate a random bivariate dataset
rs = np.random.RandomState(9)
mean = [0, 0]
cov = [(1, 0), (0, 2)]
x, y = rs.multivariate_normal(mean, cov, 100).T
# Use JointGrid directly to draw a custom plot
grid = sns.JointGrid(x, y, space=0, height=6, ratio=50)
grid.plot_joint(plt.scatter, color="g")
grid.plot_marginals(sns.rugplot, height=1, color="g")
|
"""
Scatterplot with marginal ticks
===============================
_thumb: .62, .39
"""
import numpy as np
import seaborn as sns
sns.set(style="white", color_codes=True)
# Generate a random bivariate dataset
rs = np.random.RandomState(9)
mean = [0, 0]
cov = [(1, 0), (0, 2)]
x, y = rs.multivariate_normal(mean, cov, 100).T
# Use JointGrid directly to draw a custom plot
grid = sns.JointGrid(x, y, space=0, height=6, ratio=20)
grid.plot_joint(sns.scatterplot, color="g")
grid.plot_marginals(sns.rugplot, height=1, color="g")
|
Fix thumbnail on gallery page
|
Fix thumbnail on gallery page
|
Python
|
bsd-3-clause
|
mwaskom/seaborn,arokem/seaborn,anntzer/seaborn,mwaskom/seaborn,anntzer/seaborn,arokem/seaborn
|
---
+++
@@ -2,11 +2,10 @@
Scatterplot with marginal ticks
===============================
-_thumb: .68, .32
+_thumb: .62, .39
"""
import numpy as np
import seaborn as sns
-import matplotlib.pyplot as plt
sns.set(style="white", color_codes=True)
# Generate a random bivariate dataset
@@ -16,6 +15,6 @@
x, y = rs.multivariate_normal(mean, cov, 100).T
# Use JointGrid directly to draw a custom plot
-grid = sns.JointGrid(x, y, space=0, height=6, ratio=50)
-grid.plot_joint(plt.scatter, color="g")
+grid = sns.JointGrid(x, y, space=0, height=6, ratio=20)
+grid.plot_joint(sns.scatterplot, color="g")
grid.plot_marginals(sns.rugplot, height=1, color="g")
|
8d339d610b57b40534af2a8d7cdbdaec041a995a
|
test/TestNGrams.py
|
test/TestNGrams.py
|
import unittest
import NGrams
class TestNGrams(unittest.TestCase):
def test_unigrams(self):
sentence = 'this is a random piece of text'
ngram_list = NGrams.generate_ngrams(sentence, 1)
self.assertEqual(ngram_list, [['this'], ['is'], ['a'], ['random'],
['piece'], ['of'], ['text']])
def test_bigrams(self):
sentence = 'this is a random piece of text'
ngram_list = NGrams.generate_ngrams(sentence, 2)
self.assertEqual(ngram_list, [['this', 'is'], ['is', 'a'],
['a', 'random'], ['random', 'piece'],
['piece', 'of'], ['of', 'text']])
def test_fourgrams(self):
sentence = 'this is a random piece of text'
ngram_list = NGrams.generate_ngrams(sentence, 4)
self.assertEqual(ngram_list, [['this', 'is', 'a', 'random'],
['is', 'a', 'random', 'piece'],
['a', 'random', 'piece', 'of'],
['random', 'piece', 'of', 'text']])
if __name__ == '__main__':
unittest.main()
|
import unittest
import sys
sys.path.append('../src')
import NGrams
class TestNGrams(unittest.TestCase):
def test_unigrams(self):
sentence = 'this is a random piece of text'
ngram_list = NGrams.generate_ngrams(sentence, 1)
self.assertEqual(ngram_list, [['this'], ['is'], ['a'], ['random'],
['piece'], ['of'], ['text']])
def test_bigrams(self):
sentence = 'this is a random piece of text'
ngram_list = NGrams.generate_ngrams(sentence, 2)
self.assertEqual(ngram_list, [['this', 'is'], ['is', 'a'],
['a', 'random'], ['random', 'piece'],
['piece', 'of'], ['of', 'text']])
def test_fourgrams(self):
sentence = 'this is a random piece of text'
ngram_list = NGrams.generate_ngrams(sentence, 4)
self.assertEqual(ngram_list, [['this', 'is', 'a', 'random'],
['is', 'a', 'random', 'piece'],
['a', 'random', 'piece', 'of'],
['random', 'piece', 'of', 'text']])
if __name__ == '__main__':
unittest.main()
|
Add path in test to src
|
Add path in test to src
|
Python
|
bsd-2-clause
|
ambidextrousTx/RNLTK
|
---
+++
@@ -1,4 +1,6 @@
import unittest
+import sys
+sys.path.append('../src')
import NGrams
|
0243b5d468593edda6c207aaa124e8911a824751
|
src/argparser.py
|
src/argparser.py
|
"""ArgumentParser with Italian translation."""
import argparse
import sys
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
class ArgParser(argparse.ArgumentParser):
def __init__(self,
prog=None,
usage=None,
description=None,
epilog=None,
parents=None,
formatter_class=argparse.HelpFormatter,
prefix_chars='-',
fromfile_prefix_chars=None,
argument_default=None,
conflict_handler='error',
add_help=True,
allow_abbrev=True,
exit_on_error=True):
if parents is None:
parents = []
super().__init__(prog, usage, description, epilog, parents, formatter_class,
prefix_chars, fromfile_prefix_chars, argument_default,
conflict_handler, add_help, allow_abbrev, exit_on_error)
def set_default_subparser(self, name, args=None):
"""
Default subparser selection.
name: is the name of the subparser to call by default
args: if set is the argument list handed to parse_args()
"""
subparser_found = False
for arg in sys.argv[1:]:
if arg in ['-h', '--help']: # global help if no subparser
break
else:
for x in self._subparsers._actions:
if not isinstance(x, argparse._SubParsersAction):
continue
for sp_name in x._name_parser_map.keys():
if sp_name in sys.argv[1:]:
subparser_found = True
if not subparser_found:
# insert default in first position, this implies no
# global options without a sub_parsers specified
if args is None:
sys.argv.insert(1, name)
else:
args.insert(0, name)
|
"""ArgumentParser with Italian translation."""
import argparse
import sys
def _callable(obj):
return hasattr(obj, '__call__') or hasattr(obj, '__bases__')
class ArgParser(argparse.ArgumentParser):
def __init__(self, **kwargs):
if kwargs.get('parent', None) is None:
kwargs['parents'] = []
super().__init__(**kwargs)
def set_default_subparser(self, name, args=None):
"""
Default subparser selection.
name: is the name of the subparser to call by default
args: if set is the argument list handed to parse_args()
"""
subparser_found = False
for arg in sys.argv[1:]:
if arg in ['-h', '--help']: # global help if no subparser
break
else:
for x in self._subparsers._actions:
if not isinstance(x, argparse._SubParsersAction):
continue
for sp_name in x._name_parser_map.keys():
if sp_name in sys.argv[1:]:
subparser_found = True
if not subparser_found:
# insert default in first position, this implies no
# global options without a sub_parsers specified
if args is None:
sys.argv.insert(1, name)
else:
args.insert(0, name)
|
Fix crash in python 3.8 due to a mismatch on the ArgumentParser parameter
|
Fix crash in python 3.8 due to a mismatch on the ArgumentParser parameter
|
Python
|
mit
|
claudio-unipv/pvcheck,claudio-unipv/pvcheck
|
---
+++
@@ -9,25 +9,11 @@
class ArgParser(argparse.ArgumentParser):
- def __init__(self,
- prog=None,
- usage=None,
- description=None,
- epilog=None,
- parents=None,
- formatter_class=argparse.HelpFormatter,
- prefix_chars='-',
- fromfile_prefix_chars=None,
- argument_default=None,
- conflict_handler='error',
- add_help=True,
- allow_abbrev=True,
- exit_on_error=True):
- if parents is None:
- parents = []
- super().__init__(prog, usage, description, epilog, parents, formatter_class,
- prefix_chars, fromfile_prefix_chars, argument_default,
- conflict_handler, add_help, allow_abbrev, exit_on_error)
+ def __init__(self, **kwargs):
+ if kwargs.get('parent', None) is None:
+ kwargs['parents'] = []
+
+ super().__init__(**kwargs)
def set_default_subparser(self, name, args=None):
"""
|
314ba088f0c2cb8e47da22a8841127a17e4e222d
|
openacademy/model/openacademy_course.py
|
openacademy/model/openacademy_course.py
|
from openerp import models, fields
'''
This module create model of Course
'''
class Course(models.Model):
    '''
    Openacademy course: a titled training that groups sessions.
    '''
    _name = 'openacademy.course'  # Odoo technical model name

    # Course title; must be unique and differ from the description
    # (both enforced by _sql_constraints below).
    name = fields.Char(string='Title', required=True)
    description = fields.Text(string='Description')

    # Responsible user; 'set null' keeps the course if the user is deleted.
    responsible_id = fields.Many2one('res.users',
        ondelete='set null', string="Responsible", index=True)
    # Inverse of openacademy.session.course_id.
    session_ids = fields.One2many(
        'openacademy.session', 'course_id', string="Sessions")

    _sql_constraints = [
        ('name_description_check',
         'CHECK(name != description)',
         "The title of the course should not be the description"),

        ('name_unique',
         'UNIQUE(name)',
         "The course title must be unique"),
    ]
|
from openerp import api, models, fields
'''
This module create model of Course
'''
class Course(models.Model):
    '''
    Openacademy course: a titled training that groups sessions.
    '''
    _name = 'openacademy.course'  # Odoo technical model name

    # Course title; must be unique and differ from the description
    # (both enforced by _sql_constraints below).
    name = fields.Char(string='Title', required=True)
    description = fields.Text(string='Description')

    # Responsible user; 'set null' keeps the course if the user is deleted.
    responsible_id = fields.Many2one('res.users',
        ondelete='set null', string="Responsible", index=True)
    # Inverse of openacademy.session.course_id.
    session_ids = fields.One2many(
        'openacademy.session', 'course_id', string="Sessions")

    @api.one
    def copy(self, default=None):
        """Duplicate the course with a unique "Copy of ..." title.

        Overrides the ORM copy() so the duplicate does not violate the
        UNIQUE(name) constraint: existing copies are counted and the
        count is appended to the new title when needed.
        """
        default = dict(default or {})

        copied_count = self.search_count(
            [('name', '=like', u"Copy of {}%".format(self.name))])
        if not copied_count:
            new_name = u"Copy of {}".format(self.name)
        else:
            new_name = u"Copy of {} ({})".format(self.name, copied_count)

        default['name'] = new_name
        return super(Course, self).copy(default)

    _sql_constraints = [
        ('name_description_check',
         'CHECK(name != description)',
         "The title of the course should not be the description"),

        ('name_unique',
         'UNIQUE(name)',
         "The course title must be unique"),
    ]
|
Modify copy method into inherit
|
[REF] openacademy: Modify copy method into inherit
|
Python
|
apache-2.0
|
GavyMG/openacademy-proyect
|
---
+++
@@ -1,4 +1,4 @@
-from openerp import models, fields
+from openerp import api, models, fields
'''
This module create model of Course
'''
@@ -17,6 +17,20 @@
session_ids = fields.One2many(
'openacademy.session', 'course_id', string="Sessions")
+ @api.one
+ def copy(self, default=None):
+ default = dict(default or {})
+
+ copied_count = self.search_count(
+ [('name', '=like', u"Copy of {}%".format(self.name))])
+ if not copied_count:
+ new_name = u"Copy of {}".format(self.name)
+ else:
+ new_name = u"Copy of {} ({})".format(self.name, copied_count)
+
+ default['name'] = new_name
+ return super(Course, self).copy(default)
+
_sql_constraints = [
('name_description_check',
'CHECK(name != description)',
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.