commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
49f3c5bf5b95a7d678e541d93e0999f37f8a2b26
|
students/admin.py
|
students/admin.py
|
from django.contrib import admin
from .models import WhitelistedUsername
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
admin.site.register(WhitelistedUsername, WhitelistedUsernameAdmin)
|
from django.contrib import admin
from .models import WhitelistedUsername
@admin.register(WhitelistedUsername)
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
|
Use class decorator instead of floating statement to register WhitelistedUsernameAdmin class.
|
Use class decorator instead of floating statement to register WhitelistedUsernameAdmin class.
|
Python
|
mit
|
muhummadPatel/raspied,muhummadPatel/raspied,muhummadPatel/raspied
|
---
+++
@@ -3,8 +3,6 @@
from .models import WhitelistedUsername
+@admin.register(WhitelistedUsername)
class WhitelistedUsernameAdmin(admin.ModelAdmin):
pass
-
-
-admin.site.register(WhitelistedUsername, WhitelistedUsernameAdmin)
|
fde84efc866d2276eac5faed0af3df5a672664f5
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'
def test():
local('python sana_builder/manage.py syncdb')
local('python sana_builder/manage.py test webapp')
def update_host():
with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
print(green('Pulling latest revision...'))
run('git pull origin master')
print(green('Installing dependencies...'))
run('pip install -qr requirements.txt')
print(green('Creating database tables...'))
run('python sana_builder/manage.py syncdb --noinput')
print(green('Importing fixtures...'))
run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')
print(green('Collecting static files...'))
run('python sana_builder/manage.py collectstatic --noinput')
print(green('Restarting gunicorn...'))
run('supervisorctl restart gunicorn')
def travis_deploy():
update_host()
def local_deploy():
local('git push origin master')
update_host()
|
from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'
def test():
local('python sana_builder/manage.py syncdb --noinput')
local('python sana_builder/manage.py test webapp --noinput')
def update_host():
with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
print(green('Pulling latest revision...'))
run('git pull origin master')
print(green('Installing dependencies...'))
run('pip install -qr requirements.txt')
print(green('Creating database tables...'))
run('python sana_builder/manage.py syncdb --noinput')
print(green('Importing fixtures...'))
run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')
print(green('Collecting static files...'))
run('python sana_builder/manage.py collectstatic --noinput')
print(green('Restarting gunicorn...'))
run('supervisorctl restart gunicorn')
def travis_deploy():
update_host()
def local_deploy():
local('git push origin master')
update_host()
|
Remove input from fab test
|
Remove input from fab test
|
Python
|
bsd-3-clause
|
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
|
---
+++
@@ -8,8 +8,8 @@
env.project_root = '/opt/sana.protocol_builder'
def test():
- local('python sana_builder/manage.py syncdb')
- local('python sana_builder/manage.py test webapp')
+ local('python sana_builder/manage.py syncdb --noinput')
+ local('python sana_builder/manage.py test webapp --noinput')
def update_host():
with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
|
3309fd5058294a9ee340fd3130d45711270b3062
|
daymetpy/__init__.py
|
daymetpy/__init__.py
|
__version__ = '0.0.2'
from daymetpy import daymet_timeseries
__all__ = ["daymet_timeseries"]
|
__version__ = '0.0.2'
try:
from daymetpy import daymet_timeseries
except ImportError:
from daymetpy.daymetpy import daymet_timeseries
__all__ = ["daymet_timeseries"]
|
Change to imports for 2-3 compatibility
|
Change to imports for 2-3 compatibility
|
Python
|
agpl-3.0
|
khufkens/daymetpy
|
---
+++
@@ -1,5 +1,8 @@
__version__ = '0.0.2'
-from daymetpy import daymet_timeseries
+try:
+ from daymetpy import daymet_timeseries
+except ImportError:
+ from daymetpy.daymetpy import daymet_timeseries
__all__ = ["daymet_timeseries"]
|
1001a61d345e1b3018eccfbd1cdb4a2111e23cca
|
example.py
|
example.py
|
import pyrc
import pyrc.utils.hooks as hooks
class GangstaBot(pyrc.Bot):
@hooks.command()
def bling(self, target, sender):
"will print yo"
if target.startswith("#"):
self.message(target, "%s: yo" % sender)
else:
self.message(target, "yo")
@hooks.command("^repeat\s+(?P<msg>.+)$")
def repeat(self, target, sender, **kwargs):
"will repeat whatever yo say"
if target.startswith("#"):
self.message(target, kwargs["msg"])
else:
self.message(sender, kwargs["msg"])
@hooks.privmsg("(lol|lmao|rofl(mao)?)")
def stopword(self, target, sender, *args):
"""
will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message
only applies to channel messages
"""
if target.startswith("#"):
self.message(target, args[0])
@hooks.interval(10000)
def keeprepeating(self):
"will say something"
self.message("#turntechgodhead", "stop repeating myself")
if __name__ == '__main__':
bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead'])
bot.connect()
|
import pyrc
import pyrc.utils.hooks as hooks
class GangstaBot(pyrc.Bot):
@hooks.command()
def info(self, target, sender):
"will print the target and sender to the console"
print("target: %s, sender: %s" % (target, sender))
@hooks.command()
def bling(self, target, sender):
"will print yo"
if target.startswith("#"):
self.message(target, "%s: yo" % sender)
else:
self.message(sender, "yo")
@hooks.command("^repeat\s+(?P<msg>.+)$")
def repeat(self, target, sender, **kwargs):
"will repeat whatever yo say"
if target.startswith("#"):
self.message(target, kwargs["msg"])
else:
self.message(sender, kwargs["msg"])
@hooks.privmsg("(lol|lmao|rofl(mao)?)")
def stopword(self, target, sender, *args):
"""
will repeat 'lol', 'lmao, 'rofl' or 'roflmao' when seen in a message
only applies to channel messages
"""
if target.startswith("#"):
self.message(target, args[0])
@hooks.interval(10000)
def keeprepeating(self):
"will say something"
self.message("#turntechgodhead", "stop repeating myself")
if __name__ == '__main__':
bot = GangstaBot('irc.freenode.net', channels = ['#turntechgodhead'])
bot.connect()
|
Fix bling() and add in info function to report target & sender.
|
Fix bling() and add in info function to report target & sender.
|
Python
|
mit
|
sarenji/pyrc
|
---
+++
@@ -2,13 +2,18 @@
import pyrc.utils.hooks as hooks
class GangstaBot(pyrc.Bot):
+ @hooks.command()
+ def info(self, target, sender):
+ "will print the target and sender to the console"
+ print("target: %s, sender: %s" % (target, sender))
+
@hooks.command()
def bling(self, target, sender):
"will print yo"
if target.startswith("#"):
self.message(target, "%s: yo" % sender)
else:
- self.message(target, "yo")
+ self.message(sender, "yo")
@hooks.command("^repeat\s+(?P<msg>.+)$")
def repeat(self, target, sender, **kwargs):
|
0607ff6a3a787286b174af1cb441eb1d1447b634
|
fabfile.py
|
fabfile.py
|
import os
from fabric.api import *
LOCAL_ROOT = os.path.dirname(os.path.realpath(__file__))
LOCAL_VIRTUALENV = '~/.virtualenv/tomo'
TOMO_HOST = 'www.projekt-tomo.si'
env.hosts = [TOMO_HOST]
# MAIN TASKS
@task
def test():
with lcd(LOCAL_ROOT), activate_virtualenv():
with lcd('web'):
local('./manage.py test')
@task
def deploy():
with cd('/home/gregor/docker/'):
sudo('docker-compose pull')
sudo('docker-compose up -d')
migrate()
@task
def migrate():
manage('migrate')
@task
def ls():
manage('help')
# AUXILLIARY FUNCTIONS
def activate_virtualenv():
return prefix('source {}/bin/activate'.format(LOCAL_VIRTUALENV))
def manage(command):
tomo_docker('python3 projekt-tomo/web/manage.py {}'.format(command))
def tomo_docker(command):
sudo('docker exec docker_tomo_1 {}'.format(command))
|
import os
from fabric.api import *
LOCAL_ROOT = os.path.dirname(os.path.realpath(__file__))
LOCAL_VIRTUALENV = '~/.virtualenv/tomo'
TOMO_HOST = 'www.projekt-tomo.si'
env.hosts = [TOMO_HOST]
# MAIN TASKS
@task
def test():
with lcd(LOCAL_ROOT), activate_virtualenv():
with lcd('web'):
local('./manage.py test')
@task
def deploy():
with cd('/home/gregor/docker/'):
sudo('docker-compose pull')
sudo('docker-compose up -d')
migrate()
@task
def quick_deploy():
tomo_docker('bash -c "cd projekt-tomo && git pull"')
migrate()
manage('collectstatic --noinput')
tomo_docker('uwsgi --reload /tmp/project-master.pid')
@task
def migrate():
manage('migrate')
@task
def ls():
manage('help')
# AUXILLIARY FUNCTIONS
def activate_virtualenv():
return prefix('source {}/bin/activate'.format(LOCAL_VIRTUALENV))
def manage(command):
tomo_docker('python3 projekt-tomo/web/manage.py {}'.format(command))
def tomo_docker(command):
sudo('docker exec docker_tomo_1 {}'.format(command))
|
Enable quick deploys in fabric
|
Enable quick deploys in fabric
|
Python
|
agpl-3.0
|
matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo
|
---
+++
@@ -26,6 +26,14 @@
@task
+def quick_deploy():
+ tomo_docker('bash -c "cd projekt-tomo && git pull"')
+ migrate()
+ manage('collectstatic --noinput')
+ tomo_docker('uwsgi --reload /tmp/project-master.pid')
+
+
+@task
def migrate():
manage('migrate')
|
2f3ffa846c67f9b746855f1f9ec39d861a3e95b9
|
libraries/vytree/__init__.py
|
libraries/vytree/__init__.py
|
# vytree.__init__: package init file.
#
# Copyright (C) 2014 VyOS Development Group <maintainers@vyos.net>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
from vytree.node import (
Node,
ChildNotFoundError,
ChildAlreadyExistsError,
)
from vytree.config_node import ConfigNode
|
# vytree.__init__: package init file.
#
# Copyright (C) 2014 VyOS Development Group <maintainers@vyos.net>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301
# USA
from vytree.node import (
Node,
ChildNotFoundError,
ChildAlreadyExistsError,
)
from vytree.config_node import ConfigNode
from vytree.reference_node import ReferenceNode
from vytree.reference_tree_loader import ReferenceTreeLoader
|
Add reference tree loader to imports.
|
Add reference tree loader to imports.
|
Python
|
lgpl-2.1
|
vyos-legacy/vyconfd,vyos-legacy/vyconfd
|
---
+++
@@ -25,3 +25,7 @@
)
from vytree.config_node import ConfigNode
+
+from vytree.reference_node import ReferenceNode
+
+from vytree.reference_tree_loader import ReferenceTreeLoader
|
709252b84cd4b7f1f00e26980c2998db9b1495e5
|
llvmlite/tests/test_dylib.py
|
llvmlite/tests/test_dylib.py
|
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
from ctypes.util import find_library
import unittest
@unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS")
class TestDylib(TestCase):
def setUp(self):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
self.system = platform.system()
def test_bad_library(self):
with self.assertRaises(Exception) as context:
dylib.load_library_permanently("zzzasdkf;jasd;l")
if self.system == "Linux":
self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory'
in str(context.exception))
elif self.system == "Darwin":
self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found'
in str(context.exception))
def test_libm(self):
try:
if self.system == "Linux":
libm = find_library("m")
elif self.system == "Darwin":
libm = find_library("libm")
dylib.load_library_permanently(libm)
except Exception:
self.fail("Valid call to link library should not fail.")
|
from . import TestCase
from llvmlite import binding as llvm
from llvmlite.binding import dylib
import platform
from ctypes.util import find_library
import unittest
@unittest.skipUnless(platform.system() in ["Linux", "Darwin"], "Unsupport test for current OS")
class TestDylib(TestCase):
def setUp(self):
llvm.initialize()
llvm.initialize_native_target()
llvm.initialize_native_asmprinter()
self.system = platform.system()
def test_bad_library(self):
with self.assertRaises(Exception) as context:
dylib.load_library_permanently("zzzasdkf;jasd;l")
if self.system == "Linux":
self.assertTrue('zzzasdkf;jasd;l: cannot open shared object file: No such file or directory'
in str(context.exception))
elif self.system == "Darwin":
self.assertTrue('dlopen(zzzasdkf;jasd;l, 9): image not found'
in str(context.exception))
def test_libm(self):
try:
if self.system == "Linux":
libm = find_library("m")
elif self.system == "Darwin":
libm = find_library("libm")
dylib.load_library_permanently(libm)
except Exception:
self.fail("Valid call to link library should not fail.")
|
Fix syntax for python 2.6
|
Fix syntax for python 2.6
|
Python
|
bsd-2-clause
|
markdewing/llvmlite,markdewing/llvmlite,numba/llvmlite,sklam/llvmlite,sklam/llvmlite,m-labs/llvmlite,pitrou/llvmlite,markdewing/llvmlite,pitrou/llvmlite,sklam/llvmlite,m-labs/llvmlite,squisher/llvmlite,sklam/llvmlite,ssarangi/llvmlite,numba/llvmlite,ssarangi/llvmlite,ssarangi/llvmlite,pitrou/llvmlite,squisher/llvmlite,ssarangi/llvmlite,numba/llvmlite,pitrou/llvmlite,squisher/llvmlite,markdewing/llvmlite,m-labs/llvmlite,numba/llvmlite,m-labs/llvmlite,squisher/llvmlite
|
---
+++
@@ -6,7 +6,7 @@
from ctypes.util import find_library
import unittest
-@unittest.skipUnless(platform.system() in {"Linux", "Darwin"}, "Unsupport test for current OS")
+@unittest.skipUnless(platform.system() in ["Linux", "Darwin"], "Unsupport test for current OS")
class TestDylib(TestCase):
def setUp(self):
llvm.initialize()
|
3ccfed2e70e6da68452d466353c7b0df1ff9811c
|
cricinfo/my_bot.py
|
cricinfo/my_bot.py
|
import requests
from bs4 import BeautifulSoup
CRICINFO_RSS_URL = 'http://static.cricinfo.com/rss/livescores.xml'
# Fetching matches
def get_matches():
r = requests.get(CRICINFO_RSS_URL)
soup = BeautifulSoup(r.text)
return soup.find_all('item')
matches = get_matches()
for match in matches:
print match.contents['title']
print match.contents['description']
print match.contents['link']
|
import requests
from bs4 import BeautifulSoup
import xmltodict
import click
CRICINFO_RSS_URL = 'http://static.cricinfo.com/rss/livescores.xml'
class Match(object):
def __init__(self, title, link, description, guid):
self.title = title
self.link = link
self.description = description
self.guid = guid
@staticmethod
def from_xml(xml):
item = xmltodict.parse(xml)['item']
return Match(item['title'], item['link'], item['description'], item['guid'])
def __repr__(self):
return self.title
def get_matches():
"""Fetches matches from the cricinfo url"""
r = requests.get(CRICINFO_RSS_URL)
soup = BeautifulSoup(r.text)
for match in soup.find_all('item'):
yield Match.from_xml(str(match))
def print_posts(matches):
"""Prints all matches to the console."""
click.echo()
for match in matches:
click.secho('%s\t' % match.title, bold=True, fg="red", nl=False)
click.echo()
@click.command()
def main():
"""A cli to Cricinfo to see live scores"""
# fetch matches
matches = get_matches()
# print matches
print_posts(matches)
if __name__ == '__main__':
main()
|
Add fetching live scores feature
|
Add fetching live scores feature
|
Python
|
mit
|
voidabhi/cricinfo,voidabhi/cricinfo
|
---
+++
@@ -2,17 +2,51 @@
import requests
from bs4 import BeautifulSoup
+import xmltodict
+import click
CRICINFO_RSS_URL = 'http://static.cricinfo.com/rss/livescores.xml'
-# Fetching matches
+class Match(object):
+
+ def __init__(self, title, link, description, guid):
+ self.title = title
+ self.link = link
+ self.description = description
+ self.guid = guid
+
+ @staticmethod
+ def from_xml(xml):
+ item = xmltodict.parse(xml)['item']
+ return Match(item['title'], item['link'], item['description'], item['guid'])
+
+ def __repr__(self):
+ return self.title
+
def get_matches():
+ """Fetches matches from the cricinfo url"""
+
r = requests.get(CRICINFO_RSS_URL)
soup = BeautifulSoup(r.text)
- return soup.find_all('item')
+ for match in soup.find_all('item'):
+ yield Match.from_xml(str(match))
-matches = get_matches()
-for match in matches:
- print match.contents['title']
- print match.contents['description']
- print match.contents['link']
+def print_posts(matches):
+ """Prints all matches to the console."""
+
+ click.echo()
+ for match in matches:
+ click.secho('%s\t' % match.title, bold=True, fg="red", nl=False)
+ click.echo()
+
+@click.command()
+def main():
+ """A cli to Cricinfo to see live scores"""
+
+ # fetch matches
+ matches = get_matches()
+ # print matches
+ print_posts(matches)
+
+if __name__ == '__main__':
+ main()
|
8128107f10971a61f3d0057bcb9e9ea8413b8cef
|
python/render/render_tracks.py
|
python/render/render_tracks.py
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding sites'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({})'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding sites'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({}))'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
Add missing close parenthesis on track dict
|
Add missing close parenthesis on track dict
|
Python
|
mit
|
Duke-GCB/TrackHubGenerator,Duke-GCB/TrackHubGenerator
|
---
+++
@@ -9,7 +9,7 @@
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding sites'.format(metadata['protein'], metadata['serial_number'])
- d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({})'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
+ d['long_label'] = 'Predicted {} binding sites (site width = {}, model identifier {}({}))'.format(metadata['protein'], metadata['width'], metadata['serial_number'], metadata['author_identifier'])
return d
def render_tracks(assembly, metadata_file):
|
0b82789823dbfa1fc74af0eee7b8911783519f91
|
scripts/starting_py_program.py
|
scripts/starting_py_program.py
|
#!/usr/bin/env python3
# from __future__ import print_function #(if python2)
import sys
def eprint(*args, **kwargs):
""" Just like the print function, but on stderr
"""
print(*args, file=sys.stderr, **kwargs)
def main(argv=None):
""" Program starting point, it can started by the OS or as normal function
If it's a normal function argv won't be None if started by the OS
argv is initialized by the command line arguments
"""
if argv is None:
argv = sys.argv
return 0
if __name__ == "__main__":
sys.exit(main())
|
#!/usr/bin/env python3
# from __future__ import print_function #(if python2)
import sys
import os
def eprint(*args, **kwargs):
""" Just like the print function, but on stderr
"""
print(*args, file=sys.stderr, **kwargs)
def main(argv=None):
""" Program starting point, it can started by the OS or as normal function
If it's a normal function argv won't be None if started by the OS
argv is initialized by the command line arguments
"""
prgname = os.path.basename(__file__) if '__file__' in globals() else 'prg'
if argv is None:
argv = sys.argv
return 0
if __name__ == "__main__":
sys.exit(main())
|
Add the program name to starting py
|
Add the program name to starting py
|
Python
|
unlicense
|
paolobolzoni/useful-conf,paolobolzoni/useful-conf,paolobolzoni/useful-conf
|
---
+++
@@ -3,6 +3,7 @@
# from __future__ import print_function #(if python2)
import sys
+import os
def eprint(*args, **kwargs):
@@ -16,8 +17,10 @@
If it's a normal function argv won't be None if started by the OS
argv is initialized by the command line arguments
"""
+ prgname = os.path.basename(__file__) if '__file__' in globals() else 'prg'
if argv is None:
argv = sys.argv
+
return 0
if __name__ == "__main__":
|
edd6368f4b21372e268adef04b9beb85d5603f40
|
txircd/modules/cmd_names.py
|
txircd/modules/cmd_names.py
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = filter(lambda x: x in user.channels and x in self.ircd.channels, params[0].split(","))
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class NamesCommand(Command):
def onUse(self, user, data):
for chan in data["targetchan"]:
user.report_names(chan)
def processParams(self, user, params):
if user.registered > 0:
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
channels = params[0].split(",")
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
else:
user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
return {
"user": user,
"targetchan": chan_param
}
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn(self):
return {
"commands": {
"NAMES": NamesCommand()
}
}
def cleanup(self):
del self.ircd.commands["NAMES"]
|
Send "no such channel" message on NAMES with a nonexistent channel
|
Send "no such channel" message on NAMES with a nonexistent channel
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd
|
---
+++
@@ -11,13 +11,15 @@
user.sendMessage(irc.ERR_NOTREGISTERED, "NAMES", ":You have not registered")
return {}
if params:
- channels = filter(lambda x: x in user.channels and x in self.ircd.channels, params[0].split(","))
+ channels = params[0].split(",")
else:
channels = user.channels.keys()
chan_param = []
for chan in channels:
if chan in self.ircd.channels:
chan_param.append(self.ircd.channels[chan])
+ else:
+ user.sendMessage(irc.ERR_NOSUCHNICK, chan, ":No such nick/channel")
return {
"user": user,
"targetchan": chan_param
|
0adf184e841bedffa118e85eda94ff099862cb6f
|
examsys/urls.py
|
examsys/urls.py
|
from django.conf.urls import patterns, url
from examsys import views
urlpatterns = patterns('',
# (r'^$', lambda r: HttpResponseRedirect('examsys/'))
url(r'^$', views.index, name='index'),
url(r'^login/', views.login, name='login'),
url(r'^logout/', views.logout, name='logout'),
url(r'^register/', views.register, name='register'),
)
|
from django.conf.urls import patterns, url
from examsys import views
urlpatterns = patterns('',
# (r'^$', lambda r: HttpResponseRedirect('examsys/'))
url(r'^$', views.index, name='index'),
url(r'^login/', views.login, name='login'),
url(r'^logout/', views.logout, name='logout'),
url(r'^register/', views.register, name='register'),
url(r'^choosetest/', views.choosetest, name='choosetest'),
url(r'^(?P<test_id>\d+)/$', views.taketest, name='taketest'),
)
|
Add the choose test and take test to URLs
|
Add the choose test and take test to URLs
|
Python
|
mit
|
icyflame/test-taking-platform,icyflame/test-taking-platform
|
---
+++
@@ -8,4 +8,7 @@
url(r'^login/', views.login, name='login'),
url(r'^logout/', views.logout, name='logout'),
url(r'^register/', views.register, name='register'),
+ url(r'^choosetest/', views.choosetest, name='choosetest'),
+ url(r'^(?P<test_id>\d+)/$', views.taketest, name='taketest'),
+
)
|
10e4efb1b28eb6b32f0cef3eee510f9a6e0b6909
|
src/foremast/plugin_manager.py
|
src/foremast/plugin_manager.py
|
"""Manager to handle plugins"""
import pathlib
from pluginbase import PluginBase
from .exceptions import PluginNotFound
class PluginManager:
"""Class to manage and create Spinnaker applications
Args:
paths (str): Path of plugin directory.
provider (str): The name of the cloud provider.
"""
def __init__(self, paths, provider):
path = pathlib.Path(__file__).parent.resolve()
path = path / paths
all_paths = [str(path)]
self.paths = all_paths
self.provider = provider
plugin_base = PluginBase(package='foremast.plugins')
self.plugin_source = plugin_base.make_plugin_source(searchpath=self.paths, persist=True)
def plugins(self):
"""List of all plugins available."""
for plugin in self.plugin_source.list_plugins():
yield plugin
def load(self):
"""Load the plugin."""
try:
loaded_plugin = self.plugin_source.load_plugin(self.provider)
except ModuleNotFoundError:
raise PluginNotFound('No plugin found for provider {} in {}'.format(self.provider, self.paths))
return loaded_plugin
|
"""Manager to handle plugins"""
import pathlib
from pluginbase import PluginBase
from .exceptions import PluginNotFound
class PluginManager:
"""Class to manage and create Spinnaker applications
Args:
paths (str): Path of plugin directory.
provider (str): The name of the cloud provider.
"""
def __init__(self, resource, provider):
path = pathlib.Path(__file__).parent.resolve()
path = path / resource
all_paths = [str(path)]
self.paths = all_paths
self.provider = provider
plugin_base = PluginBase(package='foremast.plugins')
self.plugin_source = plugin_base.make_plugin_source(searchpath=self.paths, persist=True)
def plugins(self):
"""List of all plugins available."""
for plugin in self.plugin_source.list_plugins():
yield plugin
def load(self):
"""Load the plugin."""
try:
loaded_plugin = self.plugin_source.load_plugin(self.provider)
except ModuleNotFoundError:
raise PluginNotFound('No plugin found for provider {} in {}'.format(self.provider, self.paths))
return loaded_plugin
|
Rename parameter to more appropriate name
|
fix: Rename parameter to more appropriate name
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
---
+++
@@ -14,9 +14,9 @@
provider (str): The name of the cloud provider.
"""
- def __init__(self, paths, provider):
+ def __init__(self, resource, provider):
path = pathlib.Path(__file__).parent.resolve()
- path = path / paths
+ path = path / resource
all_paths = [str(path)]
|
fdcb0bc502ea3976f7edd613f0bdb0857104fc82
|
examples/ags_rockstar.py
|
examples/ags_rockstar.py
|
from rockstar import RockStar
ags_code = "Display("Hello, world!");"
rock_it_bro = RockStar(days=777, file_name='helloworld.asc', code=ags_code)
rock_it_bro.make_me_a_rockstar()
|
from rockstar import RockStar
ags_code = 'Display("Hello, world!");'
rock_it_bro = RockStar(days=777, file_name='helloworld.asc', code=ags_code)
rock_it_bro.make_me_a_rockstar()
|
Fix AGS (Adventure Game Studio) example
|
Fix AGS (Adventure Game Studio) example
|
Python
|
mit
|
jehb/rockstar,monsterwater/rockstar,avinassh/rockstar,Endika/rockstar,yask123/rockstar,ActuallyACat/rockstar,varunparkhe/rockstar,jrajath94/RockStar,haosdent/rockstar,gokaygurcan/rockstar
|
---
+++
@@ -1,5 +1,5 @@
from rockstar import RockStar
-ags_code = "Display("Hello, world!");"
+ags_code = 'Display("Hello, world!");'
rock_it_bro = RockStar(days=777, file_name='helloworld.asc', code=ags_code)
rock_it_bro.make_me_a_rockstar()
|
98acdc9262cfa8c5da092e0c3b1264afdcbde66a
|
locations/spiders/speedway.py
|
locations/spiders/speedway.py
|
# -*- coding: utf-8 -*-
import scrapy
import json
from locations.items import GeojsonPointItem
class SuperAmericaSpider(scrapy.Spider):
name = "superamerica"
allowed_domains = ["superamerica.com"]
start_urls = (
'https://www.speedway.com/GasPriceSearch',
)
def parse(self, response):
yield scrapy.Request(
'https://www.speedway.com/Services/StoreService.svc/getstoresbyproximity',
callback=self.parse_search,
method='POST',
body='{"latitude":45.0,"longitude":-90.0,"radius":-1,"limit":0}',
headers={
'Content-Type': 'application/json;charset=UTF-8',
'Accept': 'application/json',
}
)
def parse_search(self, response):
data = json.loads(response.body_as_unicode())
for store in data:
properties = {
'addr:full': store['address'],
'addr:city': store['city'],
'addr:state': store['state'],
'addr:postcode': store['zip'],
'phone': store['phoneNumber'],
'ref': store['costCenterId'],
}
lon_lat = [
store['longitude'],
store['latitude'],
]
yield GeojsonPointItem(
properties=properties,
lon_lat=lon_lat,
)
|
# -*- coding: utf-8 -*-
import scrapy
import json
from locations.items import GeojsonPointItem
class SuperAmericaSpider(scrapy.Spider):
name = "speedway"
allowed_domains = ["www.speedway.com"]
start_urls = (
'https://www.speedway.com/GasPriceSearch',
)
def parse(self, response):
yield scrapy.Request(
'https://www.speedway.com/Services/StoreService.svc/getstoresbyproximity',
callback=self.parse_search,
method='POST',
body='{"latitude":45.0,"longitude":-90.0,"radius":-1,"limit":0}',
headers={
'Content-Type': 'application/json;charset=UTF-8',
'Accept': 'application/json',
}
)
def parse_search(self, response):
data = json.loads(response.body_as_unicode())
for store in data:
properties = {
'addr:full': store['address'],
'addr:city': store['city'],
'addr:state': store['state'],
'addr:postcode': store['zip'],
'phone': store['phoneNumber'],
'ref': store['costCenterId'],
}
lon_lat = [
store['longitude'],
store['latitude'],
]
yield GeojsonPointItem(
properties=properties,
lon_lat=lon_lat,
)
|
Correct the name of the spider
|
Correct the name of the spider
|
Python
|
mit
|
iandees/all-the-places,iandees/all-the-places,iandees/all-the-places
|
---
+++
@@ -6,8 +6,8 @@
class SuperAmericaSpider(scrapy.Spider):
- name = "superamerica"
- allowed_domains = ["superamerica.com"]
+ name = "speedway"
+ allowed_domains = ["www.speedway.com"]
start_urls = (
'https://www.speedway.com/GasPriceSearch',
)
|
19e26d09659dc4db6bcd27565dacd458b7e3e4cd
|
symposion/proposals/management/commands/ensure_proposal_records.py
|
symposion/proposals/management/commands/ensure_proposal_records.py
|
"""
Management command to make sure the permissions exist
for all kinds of proposals.
"""
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
def handle_noargs(self, **options):
from symposion.proposals.kinds import ensure_proposal_records
ensure_proposal_records()
|
"""
Management command to make sure the permissions exist
for all kinds of proposals.
"""
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
from symposion.proposals.kinds import ensure_proposal_records
ensure_proposal_records()
|
Use BaseCommand instead of NoArgsCommand
|
Use BaseCommand instead of NoArgsCommand
|
Python
|
bsd-3-clause
|
PyCon/pycon,njl/pycon,njl/pycon,njl/pycon,PyCon/pycon,njl/pycon,PyCon/pycon,PyCon/pycon
|
---
+++
@@ -2,12 +2,12 @@
Management command to make sure the permissions exist
for all kinds of proposals.
"""
-from django.core.management.base import NoArgsCommand
+from django.core.management.base import BaseCommand
-class Command(NoArgsCommand):
+class Command(BaseCommand):
- def handle_noargs(self, **options):
+ def handle(self, *args, **options):
from symposion.proposals.kinds import ensure_proposal_records
ensure_proposal_records()
|
dcda634f00d2e04e1c77bca14059a9df1a1fdc5c
|
ghtools/command/login.py
|
ghtools/command/login.py
|
from __future__ import print_function
from getpass import getpass, _raw_input
import logging
import sys
from argh import ArghParser, arg
from ghtools import cli
from ghtools.api import envkey, GithubAPIClient
log = logging.getLogger(__name__)
parser = ArghParser()
def login_if_needed(gh, scopes):
if gh.logged_in:
log.info("Already logged in")
return
print("Please log into GitHub ({0})".format(gh.nickname or "public"),
file=sys.stderr)
username = _raw_input("Username: ")
password = getpass("Password: ")
gh.login(username, password, scopes=scopes)
@arg('-s', '--scope',
default=None,
action='append',
help='GitHub auth scopes to request')
@arg('github',
nargs='?',
help='GitHub instance nickname (e.g "enterprise")')
def login(args):
"""
Log into a GitHub instance, and print the resulting OAuth token.
"""
with cli.catch_api_errors():
client = GithubAPIClient(args.github)
login_if_needed(client, args.scope)
oauth_token_key = envkey(client.nickname, 'oauth_token')
print("export {0}='{1}'".format(oauth_token_key, client.token))
parser.set_default_command(login)
def main():
parser.dispatch()
if __name__ == '__main__':
main()
|
from __future__ import print_function
from getpass import getpass, _raw_input
import logging
import sys
from argh import ArghParser, arg
from ghtools import cli
from ghtools.api import envkey, GithubAPIClient
log = logging.getLogger(__name__)
parser = ArghParser()
def login_if_needed(gh, scopes):
if gh.logged_in:
log.info("Already logged in")
return
print("Please log into GitHub ({0})".format(gh.nickname or "public"),
file=sys.stderr)
username = _raw_input("Username: ")
password = getpass("Password: ")
gh.login(username, password, scopes=scopes)
@arg('-s', '--scope',
default=None,
action='append',
help='GitHub auth scopes to request')
@arg('github',
nargs='?',
help='GitHub instance nickname (e.g "enterprise")')
def login(args):
"""
Log into a GitHub instance, and print the resulting OAuth token.
"""
with cli.catch_api_errors():
client = GithubAPIClient(nickname=args.github)
login_if_needed(client, args.scope)
oauth_token_key = envkey(client.nickname, 'oauth_token')
print("export {0}='{1}'".format(oauth_token_key, client.token))
parser.set_default_command(login)
def main():
parser.dispatch()
if __name__ == '__main__':
main()
|
Fix broken GithubAPIClient constructor args
|
Fix broken GithubAPIClient constructor args
|
Python
|
mit
|
alphagov/ghtools
|
---
+++
@@ -37,7 +37,7 @@
Log into a GitHub instance, and print the resulting OAuth token.
"""
with cli.catch_api_errors():
- client = GithubAPIClient(args.github)
+ client = GithubAPIClient(nickname=args.github)
login_if_needed(client, args.scope)
oauth_token_key = envkey(client.nickname, 'oauth_token')
|
298a90c942bd44f920e1b12ea0af384b7f06c6f1
|
gitless/cli/gl_switch.py
|
gitless/cli/gl_switch.py
|
# -*- coding: utf-8 -*-
# Gitless - a version control system built on top of Git.
# Licensed under GNU GPL v2.
"""gl switch - Switch branches."""
from __future__ import unicode_literals
from . import pprint
def parser(subparsers, _):
"""Adds the switch parser to the given subparsers object."""
desc = 'switch branches'
switch_parser = subparsers.add_parser(
'switch', help=desc, description=desc.capitalize())
switch_parser.add_argument('branch', help='switch to branch')
switch_parser.add_argument(
'-mo', '--move-over',
help='move uncomitted changes made in the current branch to the '
'destination branch',
action='store_true')
switch_parser.set_defaults(func=main)
def main(args, repo):
b = repo.lookup_branch(args.branch)
if not b:
pprint.err('Branch {0} doesn\'t exist'.format(args.branch))
pprint.err_exp('to list existing branches do gl branch')
return False
repo.switch_current_branch(b, move_over=args.move_over)
pprint.ok('Switched to branch {0}'.format(args.branch))
return True
|
# -*- coding: utf-8 -*-
# Gitless - a version control system built on top of Git.
# Licensed under GNU GPL v2.
"""gl switch - Switch branches."""
from __future__ import unicode_literals
from . import pprint
def parser(subparsers, _):
"""Adds the switch parser to the given subparsers object."""
desc = 'switch branches'
switch_parser = subparsers.add_parser(
'switch', help=desc, description=desc.capitalize())
switch_parser.add_argument('branch', help='switch to branch')
switch_parser.add_argument(
'-mo', '--move-over',
help='move uncomitted changes made in the current branch to the '
'destination branch',
action='store_true')
switch_parser.set_defaults(func=main)
def main(args, repo):
b = repo.lookup_branch(args.branch)
if not b:
pprint.err('Branch {0} doesn\'t exist'.format(args.branch))
pprint.err_exp('to list existing branches do gl branch')
pprint.err_exp('to create a new branch do gl branch -c feature/foo')
return False
repo.switch_current_branch(b, move_over=args.move_over)
pprint.ok('Switched to branch {0}'.format(args.branch))
return True
|
Make `switch` command a bit more helpful
|
Make `switch` command a bit more helpful
|
Python
|
mit
|
sdg-mit/gitless,sdg-mit/gitless
|
---
+++
@@ -30,6 +30,7 @@
if not b:
pprint.err('Branch {0} doesn\'t exist'.format(args.branch))
pprint.err_exp('to list existing branches do gl branch')
+ pprint.err_exp('to create a new branch do gl branch -c feature/foo')
return False
repo.switch_current_branch(b, move_over=args.move_over)
|
6bf1bceebc9acc724dd9831554ea582eabf82d08
|
tools/telemetry/telemetry/core/chrome/inspector_memory_unittest.py
|
tools/telemetry/telemetry/core/chrome/inspector_memory_unittest.py
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.test import tab_test_case
class InspectorMemoryTest(tab_test_case.TabTestCase):
def testGetDOMStats(self):
unittest_data_dir = os.path.join(os.path.dirname(__file__),
'..', '..', '..', 'unittest_data')
self._browser.SetHTTPServerDirectories(unittest_data_dir)
self._tab.Navigate(
self._browser.http_server.UrlOf('dom_counter_sample.html'))
self._tab.WaitForDocumentReadyStateToBeComplete()
counts = self._tab.dom_stats
self.assertEqual(counts['document_count'], 1)
self.assertEqual(counts['node_count'], 14)
self.assertEqual(counts['event_listener_count'], 2)
|
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.test import tab_test_case
class InspectorMemoryTest(tab_test_case.TabTestCase):
def testGetDOMStats(self):
unittest_data_dir = os.path.join(os.path.dirname(__file__),
'..', '..', '..', 'unittest_data')
self._browser.SetHTTPServerDirectories(unittest_data_dir)
# Due to an issue with CrOS, we create a new tab here rather than
# using self._tab to get a consistent starting page on all platforms
tab = self._browser.tabs.New()
tab.Navigate(
self._browser.http_server.UrlOf('dom_counter_sample.html'))
tab.WaitForDocumentReadyStateToBeComplete()
counts = tab.dom_stats
self.assertEqual(counts['document_count'], 2)
self.assertEqual(counts['node_count'], 18)
self.assertEqual(counts['event_listener_count'], 2)
|
Fix InspectorMemoryTest.testGetDOMStats to have consistent behaviour on CrOS and desktop versions of Chrome. Starting the browser in CrOS requires navigating through an initial setup that does not leave us with a tab at "chrome://newtab". This workaround runs the test in a new tab on all platforms for consistency.
|
Fix InspectorMemoryTest.testGetDOMStats to have consistent
behaviour on CrOS and desktop versions of Chrome. Starting the
browser in CrOS requires navigating through an initial setup
that does not leave us with a tab at "chrome://newtab". This workaround
runs the test in a new tab on all platforms for consistency.
BUG=235634
TEST=InspectorMemoryTest.testGetDOMStats passes on cros and system
NOTRY=true
Review URL: https://chromiumcodereview.appspot.com/14672002
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@197490 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,anirudhSK/chromium,Just-D/chromium-1,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,patrickm/chromium.src,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,Fireblend/chromium-crosswalk,hujiajie/pa-chromium,dushu1203/chromium.src,ltilve/chromium,dednal/chromium.src,hujiajie/pa-chromium,markYoungH/chromium.src,Chilledheart/chromium,patrickm/chromium.src,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,ltilve/chromium,M4sse/chromium.src,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,hujiajie/pa-chromium,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,ondra-novak/chromium.src,axinging/chromium-crosswalk,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,Fireblend/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.s
rc,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,hujiajie/pa-chromium,hujiajie/pa-chromium,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,chuan9/chromium-crosswalk,anirudhSK/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,M4sse/chromium.src,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,littlstar/chromium.src,jaruba/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,littlstar/chromium.src,ltilve/chromium,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,dushu1203/chromium.src,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,Just-D/chromium-1,dushu1203/chromium.src,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,jaruba/chromium.src,littlstar/chromium.src,chuan9/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,ChromiumWebApps/chromium,ondra-novak/chromium.src,jaruba/chromium.src,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,dednal/chromium.src,mogoweb/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,krieger
-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,hujiajie/pa-chromium,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,littlstar/chromium.src,dushu1203/chromium.src,mogoweb/chromium-crosswalk,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,axinging/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,anirudhSK/chromium,patrickm/chromium.src,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,anirudhSK/chromium,markYoungH/chromium.src,patrickm/chromium.src,Jonekee/chromium.src,littlstar/chromium.src,ltilve/chromium,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,bright-sparks/chromium-spacewalk,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,dednal/chromium.src,axinging/chromium-crosswalk,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,Chilledheart/chromium,ondra-novak/chromium.src,Just-D/chromium-1,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,M4sse/chromium.src,Jonekee/chromium.src,ondra-n
ovak/chromium.src,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,dednal/chromium.src,jaruba/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,ltilve/chromium,krieger-od/nwjs_chromium.src,patrickm/chromium.src,M4sse/chromium.src,hujiajie/pa-chromium,dednal/chromium.src,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,jaruba/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,Jonekee/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,dednal/chromium.src,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,ondra-novak/chromium.src,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,ltilve/chromium,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,Chilledheart/chromium,dednal/chromium.src,patrickm/chromium.src
|
---
+++
@@ -11,11 +11,15 @@
'..', '..', '..', 'unittest_data')
self._browser.SetHTTPServerDirectories(unittest_data_dir)
- self._tab.Navigate(
+ # Due to an issue with CrOS, we create a new tab here rather than
+ # using self._tab to get a consistent starting page on all platforms
+ tab = self._browser.tabs.New()
+
+ tab.Navigate(
self._browser.http_server.UrlOf('dom_counter_sample.html'))
- self._tab.WaitForDocumentReadyStateToBeComplete()
+ tab.WaitForDocumentReadyStateToBeComplete()
- counts = self._tab.dom_stats
- self.assertEqual(counts['document_count'], 1)
- self.assertEqual(counts['node_count'], 14)
+ counts = tab.dom_stats
+ self.assertEqual(counts['document_count'], 2)
+ self.assertEqual(counts['node_count'], 18)
self.assertEqual(counts['event_listener_count'], 2)
|
243523ee5e70a94914de23d8444478425b7bb782
|
alg_topological_sort.py
|
alg_topological_sort.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def topological_sort():
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
pass
def main():
# DAG.
dag_adjacency_dict = {
'A': ['D'],
'B': ['D'],
'C': ['D'],
'D': ['E', 'G'],
'E': ['J'],
'F': ['G'],
'G': ['I'],
'I': ['J'],
'J': []
}
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def _previsit():
pass
def _postvisit():
pass
def _dfs_explore():
pass
def topological_sort():
"""Topological Sorting for Directed Acyclic Graph (DAG).
To topologically sort a DAG, we simply do depth first search,
then arrange DAG's vertices in decreasing postvisit order.
"""
pass
def main():
# DAG.
dag_adjacency_dict = {
'A': ['D'],
'B': ['D'],
'C': ['D'],
'D': ['E', 'G'],
'E': ['J'],
'F': ['G'],
'G': ['I'],
'I': ['J'],
'J': []
}
if __name__ == '__main__':
main()
|
Add helper methods and revise doc string
|
Add helper methods and revise doc string
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
---
+++
@@ -3,8 +3,21 @@
from __future__ import division
+def _previsit():
+ pass
+
+def _postvisit():
+ pass
+
+def _dfs_explore():
+ pass
+
def topological_sort():
- """Topological Sorting for Directed Acyclic Graph (DAG)."""
+ """Topological Sorting for Directed Acyclic Graph (DAG).
+
+ To topologically sort a DAG, we simply do depth first search,
+ then arrange DAG's vertices in decreasing postvisit order.
+ """
pass
|
10f48dda3337cdb2778d76dee0df1ed4e5601439
|
apps/storybase/utils.py
|
apps/storybase/utils.py
|
"""Shared utility functions"""
from django.conf import settings
from django.template.defaultfilters import slugify as django_slugify
from django.utils.translation import ugettext_lazy as _
def get_language_name(language_code):
"""Convert a language code into its full (localized) name"""
languages = dict(settings.LANGUAGES)
return _(languages[language_code])
def slugify(value):
"""
Normalizes string, converts to lowercase, removes non-alpha characters,
converts spaces to hyphens, and truncates to 50 characters.
"""
slug = django_slugify(value)
slug = slug[:50]
return slug.rstrip('-')
def simple_language_changer(func):
"""
Proxy for the menus.simple_language_changer decorator
If the menus app is not installed, the original function is returned.
This allows view code to be easily decoupled from Django CMS.
"""
if 'menus' in settings.INSTALLED_APPS:
from menus.utils import simple_language_changer
return simple_language_changer(func)
else:
return func
# TODO: Test this a bit, make signature match handlebars implementation
def first_paragraph(value):
import re
from lxml.html import fragments_fromstring, tostring
fragments = fragments_fromstring(value)
if len(fragments):
for fragment in fragments:
if fragment.tag == 'p':
fragment.drop_tag()
return tostring(fragment)
graphs = re.split(r'[\r\n]{2,}', value)
return graphs[0]
|
"""Shared utility functions"""
from django.conf import settings
from django.template.defaultfilters import slugify as django_slugify
from django.utils.translation import ugettext_lazy as _
def get_language_name(language_code):
"""Convert a language code into its full (localized) name"""
languages = dict(settings.LANGUAGES)
return _(languages[language_code])
def slugify(value):
"""
Normalizes string, converts to lowercase, removes non-alpha characters,
converts spaces to hyphens, and truncates to 50 characters.
"""
slug = django_slugify(value)
slug = slug[:50]
return slug.rstrip('-')
def simple_language_changer(func):
"""
Proxy for the menus.simple_language_changer decorator
If the menus app is not installed, the original function is returned.
This allows view code to be easily decoupled from Django CMS.
"""
if 'menus' in settings.INSTALLED_APPS:
from menus.utils import simple_language_changer
return simple_language_changer(func)
else:
return func
# TODO: Test this a bit, make signature match handlebars implementation
def first_paragraph(value):
import re
from lxml.html import fragments_fromstring, tostring
fragments = fragments_fromstring(value)
if len(fragments):
for fragment in fragments:
if getattr(fragment, 'tag', None) == 'p':
fragment.drop_tag()
return tostring(fragment)
graphs = re.split(r'[\r\n]{2,}', value)
return graphs[0]
|
Fix parsing of fragments without HTML elements.
|
Fix parsing of fragments without HTML elements.
|
Python
|
mit
|
denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase,denverfoundation/storybase
|
---
+++
@@ -41,7 +41,7 @@
fragments = fragments_fromstring(value)
if len(fragments):
for fragment in fragments:
- if fragment.tag == 'p':
+ if getattr(fragment, 'tag', None) == 'p':
fragment.drop_tag()
return tostring(fragment)
|
cdd3989536e123877755fef621b95d0121e4c665
|
search/kws_dataset.py
|
search/kws_dataset.py
|
from search.html import HTMLVisualization
from utils.transcription import WordCoord
def search_word(word, save=False):
"""
Finds all locations of a word. Optionally the locations can be saved in an
HTML file with the same name as the word.
"""
# TODO: Find the word. Currently just dummy data for testing.
locations = ['270-01-01', '274-01-01', '270-02-02', '273-01-01', '273-02-02', '273-05-01', '270-09-01']
if save:
display_all_occurences(locations, output=word+'.html')
return locations
def display_all_occurences(locations, output='default.html'):
"""
Displays all images that are in the list of locations and highlights all
locations within the images.
"""
locations.sort()
word_coords = [WordCoord(w) for w in locations]
visual = HTMLVisualization()
while word_coords:
same_doc = 0
curr_doc = word_coords[0].get_doc()
for word in word_coords:
if word.get_doc() == curr_doc:
same_doc += 1
else:
break
# remove the words of the same doc from the list
word_ids = [w.__str__() for w in word_coords[:same_doc]]
word_coords = word_coords[same_doc:]
visual.add_image_by_id(curr_doc, word_ids=word_ids)
visual.save(output)
|
Add visualization for search results
|
Add visualization for search results
|
Python
|
mit
|
dwettstein/pattern-recognition-2016,dwettstein/pattern-recognition-2016,dwettstein/pattern-recognition-2016,dwettstein/pattern-recognition-2016
|
---
+++
@@ -1 +1,36 @@
+from search.html import HTMLVisualization
+from utils.transcription import WordCoord
+def search_word(word, save=False):
+ """
+ Finds all locations of a word. Optionally the locations can be saved in an
+ HTML file with the same name as the word.
+ """
+ # TODO: Find the word. Currently just dummy data for testing.
+ locations = ['270-01-01', '274-01-01', '270-02-02', '273-01-01', '273-02-02', '273-05-01', '270-09-01']
+ if save:
+ display_all_occurences(locations, output=word+'.html')
+
+ return locations
+
+def display_all_occurences(locations, output='default.html'):
+ """
+ Displays all images that are in the list of locations and highlights all
+ locations within the images.
+ """
+ locations.sort()
+ word_coords = [WordCoord(w) for w in locations]
+ visual = HTMLVisualization()
+ while word_coords:
+ same_doc = 0
+ curr_doc = word_coords[0].get_doc()
+ for word in word_coords:
+ if word.get_doc() == curr_doc:
+ same_doc += 1
+ else:
+ break
+ # remove the words of the same doc from the list
+ word_ids = [w.__str__() for w in word_coords[:same_doc]]
+ word_coords = word_coords[same_doc:]
+ visual.add_image_by_id(curr_doc, word_ids=word_ids)
+ visual.save(output)
|
|
36593f21c93a16beb5d2ab77ba803a9059099615
|
phillydata/waterdept/load.py
|
phillydata/waterdept/load.py
|
import os
from django.contrib.gis.utils import LayerMapping
from ..load import get_processed_data_file
from .models import WaterParcel, waterparcel_mapping
def from_shapefile(transaction_mode='autocommit', **kwargs):
"""
Load water parcel data into the database from the processed shapefile.
"""
# Using transaction_mode=autocommit because otherwise LayerMapping gets
# stuck on a feature and can't commit anything
filename = os.path.join('water_parcels', 'water_parcels.shp')
parcel_shp = get_processed_data_file(filename)
mapping = LayerMapping(WaterParcel, parcel_shp, waterparcel_mapping,
transform=True, transaction_mode=transaction_mode)
mapping.save(**kwargs)
def load(**kwargs):
from_shapefile(**kwargs)
|
import os
from django.contrib.gis.utils import LayerMapping
from ..load import get_processed_data_file
from .models import WaterAccount, WaterParcel, waterparcel_mapping
def from_shapefile(transaction_mode='autocommit', **kwargs):
"""
Load water parcel data into the database from the processed shapefile.
"""
# Using transaction_mode=autocommit because otherwise LayerMapping gets
# stuck on a feature and can't commit anything
filename = os.path.join('water_parcels', 'water_parcels.shp')
parcel_shp = get_processed_data_file(filename)
mapping = LayerMapping(WaterParcel, parcel_shp, waterparcel_mapping,
transform=True, transaction_mode=transaction_mode)
mapping.save(**kwargs)
def load(**kwargs):
from_shapefile(**kwargs)
def fix_water_accounts():
"""Fix WaterAccount instances that point to old WaterParcels."""
old_accounts = WaterAccount.objects.filter(water_parcel__parcelid__isnull=True)
for water_account in old_accounts:
orig_parcel = water_account.water_parcel
try:
# Find new WaterParcel
new_parcel = WaterParcel.objects.get(parcelid=orig_parcel.parcel_id)
# Update new WaterParcel with missing data from old WaterParcel
for field in orig_parcel._meta.fields:
attrname = field.get_attname()
if not getattr(new_parcel, attrname):
setattr(new_parcel, attrname, getattr(orig_parcel, attrname))
new_parcel.save()
# Point account to new parcel
water_account.water_parcel = new_parcel
water_account.save()
except WaterParcel.DoesNotExist:
print 'Could not find WaterParcel %s. Moving on.' % orig_parcel.parcel_id
continue
|
Fix WaterAccount instances pointing to old WaterParcels
|
Fix WaterAccount instances pointing to old WaterParcels
Start to make older WaterParcels obsolete
|
Python
|
bsd-3-clause
|
ebrelsford/django-phillydata
|
---
+++
@@ -3,7 +3,7 @@
from django.contrib.gis.utils import LayerMapping
from ..load import get_processed_data_file
-from .models import WaterParcel, waterparcel_mapping
+from .models import WaterAccount, WaterParcel, waterparcel_mapping
def from_shapefile(transaction_mode='autocommit', **kwargs):
@@ -21,3 +21,28 @@
def load(**kwargs):
from_shapefile(**kwargs)
+
+
+def fix_water_accounts():
+ """Fix WaterAccount instances that point to old WaterParcels."""
+ old_accounts = WaterAccount.objects.filter(water_parcel__parcelid__isnull=True)
+ for water_account in old_accounts:
+ orig_parcel = water_account.water_parcel
+
+ try:
+ # Find new WaterParcel
+ new_parcel = WaterParcel.objects.get(parcelid=orig_parcel.parcel_id)
+
+ # Update new WaterParcel with missing data from old WaterParcel
+ for field in orig_parcel._meta.fields:
+ attrname = field.get_attname()
+ if not getattr(new_parcel, attrname):
+ setattr(new_parcel, attrname, getattr(orig_parcel, attrname))
+ new_parcel.save()
+
+ # Point account to new parcel
+ water_account.water_parcel = new_parcel
+ water_account.save()
+ except WaterParcel.DoesNotExist:
+ print 'Could not find WaterParcel %s. Moving on.' % orig_parcel.parcel_id
+ continue
|
342d3791aa80084309ffc00a9e5e936fa8277401
|
AFQ/viz.py
|
AFQ/viz.py
|
import tempfile
import os.path as op
import numpy as np
import IPython.display as display
import nibabel as nib
from dipy.viz import fvtk
from palettable.tableau import Tableau_20
def visualize_bundles(trk, ren=None, inline=True, interact=False):
"""
Visualize bundles in 3D using fvtk
"""
if isinstance(trk, str):
trk = nib.streamlines.load(trk)
if ren is None:
ren = fvtk.ren()
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
this_sl = list(trk.streamlines[idx])
sl_actor = fvtk.line(this_sl, Tableau_20.colors[np.mod(20, int(b))])
fvtk.add(ren, sl_actor)
if inline:
tdir = tempfile.gettempdir()
fname = op.join(tdir, "fig.png")
fvtk.record(ren, out_path=fname)
display.display_png(display.Image(fname))
if interact:
fvtk.show(ren)
return ren
|
import tempfile
import os.path as op
import numpy as np
import IPython.display as display
import nibabel as nib
from dipy.viz import fvtk
from dipy.viz.colormap import line_colors
from palettable.tableau import Tableau_20
def visualize_bundles(trk, ren=None, inline=True, interact=False):
"""
Visualize bundles in 3D using fvtk
"""
if isinstance(trk, str):
trk = nib.streamlines.load(trk)
if ren is None:
ren = fvtk.ren()
# There are no bundles in here:
if list(trk.tractogram.data_per_streamline.keys()) == []:
streamlines = list(trk.streamlines)
sl_actor = fvtk.line(streamlines, line_colors(streamlines))
fvtk.add(ren, sl_actor)
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
this_sl = list(trk.streamlines[idx])
sl_actor = fvtk.line(this_sl, Tableau_20.colors[np.mod(20, int(b))])
fvtk.add(ren, sl_actor)
if inline:
tdir = tempfile.gettempdir()
fname = op.join(tdir, "fig.png")
fvtk.record(ren, out_path=fname)
display.display_png(display.Image(fname))
if interact:
fvtk.show(ren)
return ren
|
Enable visualizing trk files without bundle designations.
|
Enable visualizing trk files without bundle designations.
|
Python
|
bsd-2-clause
|
yeatmanlab/pyAFQ,arokem/pyAFQ,yeatmanlab/pyAFQ,arokem/pyAFQ
|
---
+++
@@ -5,6 +5,8 @@
import nibabel as nib
from dipy.viz import fvtk
+from dipy.viz.colormap import line_colors
+
from palettable.tableau import Tableau_20
@@ -19,6 +21,12 @@
if ren is None:
ren = fvtk.ren()
+
+ # There are no bundles in here:
+ if list(trk.tractogram.data_per_streamline.keys()) == []:
+ streamlines = list(trk.streamlines)
+ sl_actor = fvtk.line(streamlines, line_colors(streamlines))
+ fvtk.add(ren, sl_actor)
for b in np.unique(trk.tractogram.data_per_streamline['bundle']):
idx = np.where(trk.tractogram.data_per_streamline['bundle'] == b)[0]
|
7ad1d9afdbf8db2960ac6b402f4da3f1675cc86f
|
fileupload/models.py
|
fileupload/models.py
|
from django.db import models
class Picture(models.Model):
"""
This is a small demo using just two fields. ImageField depends on PIL or
pillow (where Pillow is easily installable in a virtualenv. If you have
problems installing pillow, use a more generic FileField instead.
"""
picture_file = models.ImageField(upload_to="pictures")
def __unicode__(self):
return self.picture_file.name
|
from django.db import models
class Picture(models.Model):
"""
This is a small demo using just two fields. ImageField depends on PIL or
pillow (where Pillow is easily installable in a virtualenv. If you have
problems installing pillow, use a more generic FileField instead.
"""
file = models.ImageField(upload_to="pictures")
def __unicode__(self):
return self.file.name
|
Use the same name for the field in frontend and backend
|
Use the same name for the field in frontend and backend
|
Python
|
mit
|
sigurdga/django-dropzone-upload,sigurdga/django-dropzone-upload
|
---
+++
@@ -8,7 +8,7 @@
problems installing pillow, use a more generic FileField instead.
"""
- picture_file = models.ImageField(upload_to="pictures")
+ file = models.ImageField(upload_to="pictures")
def __unicode__(self):
- return self.picture_file.name
+ return self.file.name
|
eaa75a86a3ea64e2c98dbcdd0a0b9731c9505abf
|
sts/contextmanagers.py
|
sts/contextmanagers.py
|
from .models import System
class transition(object):
"Transition context manager."
def __init__(self, obj, state, event=None, start_time=None,
message=None, exception_fail=True):
self.system = System.get(obj)
self.transition = self.system.start_transition(event=event,
start_time=start_time)
self.state = state
self.message = message
self.exception_fail = exception_fail
def __enter__(self):
return self.transition
def __exit__(self, exc_type, exc_value, traceback):
if exc_type and self.exception_fail:
failed = True
else:
failed = None
# Use the locally set message
message = self.transition.message or self.message
# End the transition
self.system.end_transition(self.state, message=message, failed=failed)
|
from .models import System
class transition(object):
"Transition context manager."
def __init__(self, obj, state, event=None, start_time=None,
message=None, exception_fail=True, fail_state='Fail'):
self.system = System.get(obj)
self.transition = self.system.start_transition(event=event,
start_time=start_time)
self.state = state
self.message = message
self.exception_fail = exception_fail
self.fail_state = fail_state
def __enter__(self):
return self.transition
def __exit__(self, exc_type, exc_value, traceback):
if exc_type and self.exception_fail:
failed = True
else:
failed = False
# Use the locally set message
message = self.transition.message or self.message
state = self.fail_state if failed else self.state
# End the transition
self.system.end_transition(state, message=message, failed=failed)
|
Add back fail_state to context manager
|
Add back fail_state to context manager
|
Python
|
bsd-3-clause
|
chop-dbhi/django-sts,chop-dbhi/django-sts
|
---
+++
@@ -4,7 +4,7 @@
class transition(object):
"Transition context manager."
def __init__(self, obj, state, event=None, start_time=None,
- message=None, exception_fail=True):
+ message=None, exception_fail=True, fail_state='Fail'):
self.system = System.get(obj)
self.transition = self.system.start_transition(event=event,
@@ -12,6 +12,7 @@
self.state = state
self.message = message
self.exception_fail = exception_fail
+ self.fail_state = fail_state
def __enter__(self):
return self.transition
@@ -20,10 +21,11 @@
if exc_type and self.exception_fail:
failed = True
else:
- failed = None
+ failed = False
# Use the locally set message
message = self.transition.message or self.message
+ state = self.fail_state if failed else self.state
# End the transition
- self.system.end_transition(self.state, message=message, failed=failed)
+ self.system.end_transition(state, message=message, failed=failed)
|
35f2838d1451681f1cc49fba3b4466389bf2cf68
|
test/test_allocator.py
|
test/test_allocator.py
|
from support import lib,ffi
from qcgc_test import QCGCTest
class AllocatorTest(QCGCTest):
def test_cells_to_bytes(self):
for i in range(1,17):
self.assertEqual(1, lib.bytes_to_cells(i))
self.assertEqual(2, lib.bytes_to_cells(17))
|
from support import lib,ffi
from qcgc_test import QCGCTest
class AllocatorTest(QCGCTest):
def test_cells_to_bytes(self):
for i in range(1,17):
self.assertEqual(1, lib.bytes_to_cells(i))
self.assertEqual(2, lib.bytes_to_cells(17))
def test_init_values(self):
self.assertNotEqual(ffi.NULL, lib.arenas)
for i in range(lib.qcgc_small_free_lists):
l = lib.small_free_list(i)
self.assertNotEqual(ffi.NULL, l)
for i in range(lib.qcgc_large_free_lists):
l = lib.large_free_list(i)
self.assertNotEqual(ffi.NULL, l)
|
Add testcase for allocator initialization
|
Add testcase for allocator initialization
|
Python
|
mit
|
ntruessel/qcgc,ntruessel/qcgc,ntruessel/qcgc
|
---
+++
@@ -6,3 +6,12 @@
for i in range(1,17):
self.assertEqual(1, lib.bytes_to_cells(i))
self.assertEqual(2, lib.bytes_to_cells(17))
+
+ def test_init_values(self):
+ self.assertNotEqual(ffi.NULL, lib.arenas)
+ for i in range(lib.qcgc_small_free_lists):
+ l = lib.small_free_list(i)
+ self.assertNotEqual(ffi.NULL, l)
+ for i in range(lib.qcgc_large_free_lists):
+ l = lib.large_free_list(i)
+ self.assertNotEqual(ffi.NULL, l)
|
928d1d8aab846e9393f925690bd1f51f327fb5ad
|
test_arrange_schedule.py
|
test_arrange_schedule.py
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_read_arrange_mode():
keys = ['arrange_sn','arrange_mode','condition']
receive_msg = read_arrange_mode()
for key in keys:
assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
print("All test passed")
|
from arrange_schedule import *
def test_read_system_setting():
keys = ['board_py_dir','shutdown','max_db_log','min_db_activity']
system_setting = read_system_setting()
for key in keys:
assert key in system_setting
return system_setting
def test_read_arrange_mode():
keys = ['arrange_sn','arrange_mode','condition']
receive_msg = read_arrange_mode()
for key in keys:
assert key in receive_msg
def test_crawler_cwb_img(system_setting):
send_msg = {}
send_msg['server_dir'] = system_setting['board_py_dir']
send_msg['user_id'] = 1
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
def test_crawler_inside_img():
receive_msg = crawler_inside_news()
assert receive_msg['result'] == 'success'
def test_crawler_techorange_news():
receive_msg = crawler_techorange_news()
assert receive_msg['result'] == 'success'
def test_crawler_medium_news():
receive_msg = crawler_medium_news()
assert receive_msg['result'] == 'success'
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
test_crawler_inside_img()
test_crawler_techorange_news()
test_crawler_medium_news()
print("All test passed")
|
Add test case for forum crawler
|
Add test case for forum crawler
The forum include: 'inside', 'techorange', 'media'
|
Python
|
apache-2.0
|
stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,SWLBot/electronic-blackboard,SWLBot/electronic-blackboard,Billy4195/electronic-blackboard,SWLBot/electronic-blackboard,stvreumi/electronic-blackboard,stvreumi/electronic-blackboard,SWLBot/electronic-blackboard,stvreumi/electronic-blackboard,chenyang14/electronic-blackboard,Billy4195/electronic-blackboard,Billy4195/electronic-blackboard,Billy4195/electronic-blackboard,chenyang14/electronic-blackboard
|
---
+++
@@ -22,8 +22,23 @@
receive_msg = crawler_cwb_img(send_msg)
assert receive_msg['result'] == 'success'
+def test_crawler_inside_img():
+ receive_msg = crawler_inside_news()
+ assert receive_msg['result'] == 'success'
+
+def test_crawler_techorange_news():
+ receive_msg = crawler_techorange_news()
+ assert receive_msg['result'] == 'success'
+
+def test_crawler_medium_news():
+ receive_msg = crawler_medium_news()
+ assert receive_msg['result'] == 'success'
+
if __name__ == "__main__":
system_setting = test_read_system_setting()
test_read_arrange_mode()
test_crawler_cwb_img(system_setting)
+ test_crawler_inside_img()
+ test_crawler_techorange_news()
+ test_crawler_medium_news()
print("All test passed")
|
d855e5626ee639a237467af7f6f57947cd17f9c4
|
user_messages/views.py
|
user_messages/views.py
|
from django.contrib.auth.decorators import login_required
from django.db.models import Q
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
if request.user == thread.to_user:
thread.to_user_unread = False
else:
thread.from_user_unread = False
thread.save()
return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
|
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.db.models import Q
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.template import RequestContext
from user_messages.forms import MessageReplyForm
from user_messages.models import Thread, Message
@login_required
def inbox(request, template_name='user_messages/inbox.html'):
threads = list(Thread.objects.inbox(request.user))
threads.sort(key=lambda o: o.latest_message.sent_at, reversed=True)
return render_to_response(template_name, {'threads': threads}, context_instance=RequestContext(request))
@login_required
def thread_detail(request, thread_id,
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
if request.method == 'POST':
form = MessageReplyForm(request.POST, user=requst.user, thread=thread)
if form.is_valid():
form.save()
return HttpResponseRedirect(reverse(inbox))
else:
form = MessageReplyForm(user=request.user, thread=thread)
if request.user == thread.to_user:
thread.to_user_unread = False
else:
thread.from_user_unread = False
thread.save()
return render_to_response(template_name, {
'thread': thread,
'form': form
}, context_instance=RequestContext(request))
|
Add reply support to threads
|
Add reply support to threads
|
Python
|
mit
|
pinax/pinax-messages,arthur-wsw/pinax-messages,arthur-wsw/pinax-messages,eldarion/user_messages,pinax/pinax-messages,eldarion/user_messages
|
---
+++
@@ -1,8 +1,11 @@
from django.contrib.auth.decorators import login_required
+from django.core.urlresolvers import reverse
from django.db.models import Q
+from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.template import RequestContext
+from user_messages.forms import MessageReplyForm
from user_messages.models import Thread, Message
@login_required
@@ -16,9 +19,19 @@
template_name='user_messages/thread_detail.html'):
qs = Thread.objects.filter(Q(to_user=request.user) | Q(from_user=request.user))
thread = get_object_or_404(qs, pk=thread_id)
- if request.user == thread.to_user:
- thread.to_user_unread = False
+ if request.method == 'POST':
+ form = MessageReplyForm(request.POST, user=requst.user, thread=thread)
+ if form.is_valid():
+ form.save()
+ return HttpResponseRedirect(reverse(inbox))
else:
- thread.from_user_unread = False
- thread.save()
- return render_to_response(template_name, {'thread': thread}, context_instance=RequestContext(request))
+ form = MessageReplyForm(user=request.user, thread=thread)
+ if request.user == thread.to_user:
+ thread.to_user_unread = False
+ else:
+ thread.from_user_unread = False
+ thread.save()
+ return render_to_response(template_name, {
+ 'thread': thread,
+ 'form': form
+ }, context_instance=RequestContext(request))
|
cc7a5fd3bdf7197f99bd45e4bbcb0b8fe6e5ccd6
|
md5_checksum.py
|
md5_checksum.py
|
# VOD metadata file generator - md5_checksum sub-module
# Copyright 2013 Bo Bayles (bbayles@gmail.com)
# See README for more information
# See LICENSE for license
import hashlib
def md5_checksum(file_path, chunk_bytes=163840):
"""Return the MD5 checksum (hex digest) of the file"""
with open(file_path, "rb") as infile:
checksum = hashlib.md5()
while 1:
data = infile.read(chunk_bytes)
if not data:
break
checksum.update(data)
return checksum.hexdigest()
|
# VOD metadata file generator - md5_checksum sub-module
# Copyright 2013 Bo Bayles (bbayles@gmail.com)
# See README for more information
# See LICENSE for license
import hashlib
def md5_checksum(file_path, chunk_bytes=4194304):
"""Return the MD5 checksum (hex digest) of the file"""
with open(file_path, "rb") as infile:
checksum = hashlib.md5()
while 1:
data = infile.read(chunk_bytes)
if not data:
break
checksum.update(data)
return checksum.hexdigest()
|
Switch default read size to 4 MiB
|
Switch default read size to 4 MiB
|
Python
|
mit
|
bbayles/vod_metadata
|
---
+++
@@ -4,7 +4,7 @@
# See LICENSE for license
import hashlib
-def md5_checksum(file_path, chunk_bytes=163840):
+def md5_checksum(file_path, chunk_bytes=4194304):
"""Return the MD5 checksum (hex digest) of the file"""
with open(file_path, "rb") as infile:
|
c2dbfc7f18dc44747fbb8b14e212cbb4151e8f85
|
analyze.py
|
analyze.py
|
import fore.database
analysis = fore.database.get_analysis(2)
import pickle, base64
analysis = pickle.loads(base64.b64decode(analysis))
print(analysis)
|
import sys
import fore.database
if len(sys.argv) > 1:
track_no = sys.argv[1]
else:
track_no = 2
analysis = fore.database.get_analysis(track_no)
import pickle, base64
analysis = pickle.loads(base64.b64decode(analysis))
print(analysis)
|
Send track number as CLI argument.
|
Send track number as CLI argument.
|
Python
|
artistic-2.0
|
MikeiLL/appension,Rosuav/appension,MikeiLL/appension,MikeiLL/appension,Rosuav/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension
|
---
+++
@@ -1,5 +1,11 @@
+import sys
import fore.database
-analysis = fore.database.get_analysis(2)
+
+if len(sys.argv) > 1:
+ track_no = sys.argv[1]
+else:
+ track_no = 2
+analysis = fore.database.get_analysis(track_no)
import pickle, base64
analysis = pickle.loads(base64.b64decode(analysis))
print(analysis)
|
a3dd1f1c358ab8be7987f9e93ff4f2c0351ae43e
|
porick/views.py
|
porick/views.py
|
from flask import render_template, g
from porick import app, model
@app.route('/')
def landing_page():
return render_template('/index.html')
@app.route('/browse')
@app.route('/browse/<area>')
@app.route('/browse/<area>/page/<page>')
def browse(area=None, page=None):
raise NotImplementedError()
@app.route('/browse/tags')
@app.route('/browse/tags/<tag>')
@app.route('/browse/tags/<tag>/page/<page>')
def browse_by_tags(tag=None, page=None):
raise NotImplementedError()
@app.route('/search')
@app.route('/search/<term>')
@app.route('/search/<term>/page/<page>')
def search(term=None, page=None):
raise NotImplementedError()
@app.route('/create')
def new_quote():
raise NotImplementedError()
@app.route('/signup')
def create_account():
raise NotImplementedError()
@app.route('/login')
def login():
raise NotImplementedError()
@app.route('/logout')
def logout():
raise NotImplementedError()
@app.route('/reset_password')
def reset_password():
raise NotImplementedError()
|
from flask import render_template, g
from porick import app, model
@app.route('/')
def landing_page():
return render_template('/index.html')
@app.route('/browse')
@app.route('/browse/<int:quote_id>')
@app.route('/browse/<area>')
@app.route('/browse/<area>/page/<page>')
def browse(area=None, page=None):
raise NotImplementedError()
@app.route('/browse/tags')
@app.route('/browse/tags/<tag>')
@app.route('/browse/tags/<tag>/page/<page>')
def browse_by_tags(tag=None, page=None):
raise NotImplementedError()
@app.route('/search')
@app.route('/search/<term>')
@app.route('/search/<term>/page/<page>')
def search(term=None, page=None):
raise NotImplementedError()
@app.route('/create')
def new_quote():
raise NotImplementedError()
@app.route('/signup')
def create_account():
raise NotImplementedError()
@app.route('/login')
def login():
raise NotImplementedError()
@app.route('/logout')
def logout():
raise NotImplementedError()
@app.route('/reset_password')
def reset_password():
raise NotImplementedError()
|
Add route for individual quote
|
Add route for individual quote
|
Python
|
apache-2.0
|
stesh/porick-flask,stesh/porick-flask,stesh/porick-flask
|
---
+++
@@ -1,4 +1,5 @@
from flask import render_template, g
+
from porick import app, model
@@ -8,6 +9,7 @@
@app.route('/browse')
+@app.route('/browse/<int:quote_id>')
@app.route('/browse/<area>')
@app.route('/browse/<area>/page/<page>')
def browse(area=None, page=None):
|
2046d82addab9ec83dbb85a2d08c727a52065d8b
|
deckglue/models.py
|
deckglue/models.py
|
from django.db import models
# Create your models here.
|
from django.contrib.auth.models import Permission
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from cardbox.card_model import Card
from cardbox.deck_model import Deck
from guardian.shortcuts import assign_perm, get_users_with_perms
from guardian.models import UserObjectPermission
from memorize.models import Practice
from django.contrib.auth.models import User
@receiver(post_save, sender=UserObjectPermission)
def create_practice_objects_for_new_viewers(sender, **kwargs):
if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
Practice(item=card, user=User.objects.get(id = kwargs['instance'].user_id)).save()
@receiver(pre_delete, sender=UserObjectPermission)
def delete_practice_objects_for_removed_viewers(sender, **kwargs):
if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
Practice.objects.get(object_id=card.ID, user=User.objects.get(id = kwargs['instance'].user_id)).delete()
@receiver(post_save, sender=Card)
def create_practice_objects_for_new_card(sender,update_fields, **kwargs):
"""Creates practice objects for all users with permission to view the card.
"""
perm_users = get_users_with_perms(kwargs['instance'].deck)
for user in perm_users:
practice = Practice(item = kwargs['instance'], user = user)
if Practice.objects.filter(object_id = kwargs['instance'].ID, user=user).count() == 0:
practice.save()
@receiver(pre_delete, sender=Card)
def delete_practice_objects(sender, **kwargs):
"""Deletes all practice objects for a card once it is deleted.
"""
Practice.objects.filter(object_id = kwargs['instance'].ID).delete()
|
Add signal hooks to create practice objects
|
Add signal hooks to create practice objects
|
Python
|
mit
|
DummyDivision/Tsune,DummyDivision/Tsune,DummyDivision/Tsune
|
---
+++
@@ -1,3 +1,42 @@
-from django.db import models
+from django.contrib.auth.models import Permission
+from django.db.models.signals import post_save, pre_delete
+from django.dispatch import receiver
+from cardbox.card_model import Card
+from cardbox.deck_model import Deck
+from guardian.shortcuts import assign_perm, get_users_with_perms
+from guardian.models import UserObjectPermission
+from memorize.models import Practice
+from django.contrib.auth.models import User
-# Create your models here.
+@receiver(post_save, sender=UserObjectPermission)
+def create_practice_objects_for_new_viewers(sender, **kwargs):
+ if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
+ for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
+ Practice(item=card, user=User.objects.get(id = kwargs['instance'].user_id)).save()
+
+@receiver(pre_delete, sender=UserObjectPermission)
+def delete_practice_objects_for_removed_viewers(sender, **kwargs):
+ if kwargs['instance'].permission_id == Permission.objects.get(codename="view_deck").id:
+ for card in Card.objects.filter(deck=kwargs['instance'].object_pk):
+ Practice.objects.get(object_id=card.ID, user=User.objects.get(id = kwargs['instance'].user_id)).delete()
+
+@receiver(post_save, sender=Card)
+def create_practice_objects_for_new_card(sender,update_fields, **kwargs):
+ """Creates practice objects for all users with permission to view the card.
+
+ """
+ perm_users = get_users_with_perms(kwargs['instance'].deck)
+ for user in perm_users:
+ practice = Practice(item = kwargs['instance'], user = user)
+ if Practice.objects.filter(object_id = kwargs['instance'].ID, user=user).count() == 0:
+ practice.save()
+
+
+
+@receiver(pre_delete, sender=Card)
+def delete_practice_objects(sender, **kwargs):
+ """Deletes all practice objects for a card once it is deleted.
+
+ """
+ Practice.objects.filter(object_id = kwargs['instance'].ID).delete()
+
|
76c193f457bb45e8e821594de67da8e15c4352d4
|
product.py
|
product.py
|
from datetime import datetime
class Product():
def __init__(self, cost, name, date):
self.cost = cost
self.name = name
self.date = date
def days_left(self):
return datetime.now() - self.date
|
from datetime import datetime
class Product():
def __init__(self, cost, name, date):
self.cost = cost
self.name = name
self.date = date
self.passed_phases = set()
def days_left(self):
return datetime.now() - self.date
if __name__ == '__main__':
main()
def main():
|
Add passed_phases to Product and create main function
|
Add passed_phases to Product and create main function
|
Python
|
mit
|
AliGhahraei/phar-ant-colony
|
---
+++
@@ -5,6 +5,14 @@
self.cost = cost
self.name = name
self.date = date
+ self.passed_phases = set()
def days_left(self):
return datetime.now() - self.date
+
+
+if __name__ == '__main__':
+ main()
+
+
+def main():
|
b6dea08a0a9908d2303693cf4534c7b0beec4154
|
analyticpi/db.py
|
analyticpi/db.py
|
import os
import peewee
APP_DIR = os.path.dirname(__file__)
try:
import urlparse
import psycopg2
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ["DATABASE_URL"])
database = peewee.PostgresqlDatabase(database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port)
except KeyError:
database = peewee.MySQLDatabase(os.environ["MYSQL_DATABASE"],
os.environ["MYSQL_HOST"],
user=os.environ["MYSQL_USER"],
passwd=os.environ["MYSQL_PASSWD"])
|
import os
import peewee
APP_DIR = os.path.dirname(__file__)
try:
import urlparse
import psycopg2
urlparse.uses_netloc.append('postgres')
url = urlparse.urlparse(os.environ["DATABASE_URL"])
database = peewee.PostgresqlDatabase(database=url.path[1:],
user=url.username,
password=url.password,
host=url.hostname,
port=url.port)
except KeyError:
database = peewee.SqliteDatabase('my_app.db')
|
Change from MySQL to SQLite3
|
Change from MySQL to SQLite3
|
Python
|
mit
|
analyticpi/analyticpi,analyticpi/analyticpi,analyticpi/analyticpi
|
---
+++
@@ -15,7 +15,4 @@
host=url.hostname,
port=url.port)
except KeyError:
- database = peewee.MySQLDatabase(os.environ["MYSQL_DATABASE"],
- os.environ["MYSQL_HOST"],
- user=os.environ["MYSQL_USER"],
- passwd=os.environ["MYSQL_PASSWD"])
+ database = peewee.SqliteDatabase('my_app.db')
|
6a1c699f92f43cbe65bf729352695473474a91ae
|
astm/__init__.py
|
astm/__init__.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
from .version import __version__, __version_info__
from .exceptions import BaseASTMError, NotAccepted, InvalidState
from .codec import (
decode, decode_message, decode_record,
encode, encode_message, encode_record,
make_checksum
)
from .mapping import Record, Component
from .records import (
HeaderRecord, PatientRecord, OrderRecord,
ResultRecord, CommentRecord, TerminatorRecord
)
from .protocol import ASTMProtocol
from .client import Client
from .server import RequestHandler, Server
import logging
log = logging.getLogger()
log.addHandler(logging.NullHandler())
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Alexander Shorin
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
from .version import __version__, __version_info__
from .exceptions import BaseASTMError, NotAccepted, InvalidState
from .codec import (
decode, decode_message, decode_record,
encode, encode_message, encode_record,
make_checksum
)
from .mapping import Record, Component
from .records import (
HeaderRecord, PatientRecord, OrderRecord,
ResultRecord, CommentRecord, TerminatorRecord
)
from .protocol import ASTMProtocol
from .client import Client
from .server import RequestHandler, Server
import logging
log = logging.getLogger()
class NullHandler(logging.Handler):
def emit(self, *args, **kwargs):
pass
log.addHandler(NullHandler())
|
Fix compatibility issue with Python 2.6
|
Fix compatibility issue with Python 2.6
|
Python
|
bsd-3-clause
|
asingla87/python-astm,kxepal/python-astm,123412345/python-astm,andrexmd/python-astm,MarcosHaenisch/python-astm,eddiep1101/python-astm,briankip/python-astm,pombreda/python-astm,Iskander1b/python-astm,tinoshot/python-astm,mhaulo/python-astm,LogicalKnight/python-astm,AlanZatarain/python-astm,kxepal/python-astm,tectronics/python-astm,Alwnikrotikz/python-astm
|
---
+++
@@ -25,4 +25,9 @@
import logging
log = logging.getLogger()
-log.addHandler(logging.NullHandler())
+
+class NullHandler(logging.Handler):
+ def emit(self, *args, **kwargs):
+ pass
+
+log.addHandler(NullHandler())
|
a033fdde5a7f8a250865fbeed6f2ff6ce6908420
|
util/git.py
|
util/git.py
|
# -*- coding: utf-8 -*-
GIT_SEMINAR_PATH = 'data/seminar-test/'
TASK_MOOSTER_PATH = 'task-mooster/'
|
# -*- coding: utf-8 -*-
GIT_SEMINAR_PATH = 'data/seminar/'
TASK_MOOSTER_PATH = 'task-mooster/'
|
Fix path to seminar repository.
|
Fix path to seminar repository.
|
Python
|
mit
|
fi-ksi/web-backend,fi-ksi/web-backend
|
---
+++
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-GIT_SEMINAR_PATH = 'data/seminar-test/'
+GIT_SEMINAR_PATH = 'data/seminar/'
TASK_MOOSTER_PATH = 'task-mooster/'
|
aa143e28b61118c0fc3e5d28f2330572213b501c
|
halaqat/urls.py
|
halaqat/urls.py
|
"""halaqat URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from back_office import urls as back_office_url
urlpatterns = [
url(r'^back_office/', include(back_office_url)),
url(r'^admin/', include(admin.site.urls)),
]
|
"""halaqat URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from back_office import urls as back_office_url
from students import urls as students_url
urlpatterns = [
url(r'^back_office/', include(back_office_url)),
url(r'^students/', include(students_url)),
url(r'^admin/', include(admin.site.urls)),
]
|
Add students app url configuration
|
Add students app url configuration
|
Python
|
mit
|
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
|
---
+++
@@ -15,9 +15,12 @@
"""
from django.conf.urls import include, url
from django.contrib import admin
+
from back_office import urls as back_office_url
+from students import urls as students_url
urlpatterns = [
url(r'^back_office/', include(back_office_url)),
+ url(r'^students/', include(students_url)),
url(r'^admin/', include(admin.site.urls)),
]
|
b5fb888c5b74cc99008cdc7e226f354d20b71b8c
|
select_exact.py
|
select_exact.py
|
import sublime_plugin
class SelectExactMatchCommand(sublime_plugin.TextCommand):
last_selection = None
def run(self, edit):
selections = self.view.sel()
if selections[0].empty():
selections.add(self.view.word(selections[0]))
return
word = self.view.substr(self.view.word(selections[0]))
pattern = "\\b%s\\b" % (word)
region = self.view.find(pattern, selections[-1].end())
if not region:
region = self.view.find(
pattern,
self.last_selection.end() if self.last_selection else 0
)
if region:
self.last_selection = region
else:
self.last_selection = None
if region:
selections.add(region)
def description():
return "Select Exact Match"
class SelectAllExactMatchesCommand(sublime_plugin.TextCommand):
def run(self, edit):
selections = self.view.sel()
if selections[0].empty():
selections.add(self.view.word(selections[0]))
word = self.view.substr(self.view.word(selections[0]))
pattern = "\\b%s\\b" % (word)
selections.add_all(self.view.find_all(pattern))
def description():
return "Select All Exact Matches"
|
import sublime_plugin
class SelectExactMatchCommand(sublime_plugin.TextCommand):
    """Extend the selection to the next exact (whole-word) match.

    With empty cursors, first expands each cursor to the word under it and
    stops; with a word selected, adds the next \\b-bounded occurrence,
    wrapping around via the remembered last match.
    """

    # Region of the most recently added match; used as the wrap-around
    # starting point when no further match exists past the selection.
    last_selection = None

    def run(self, edit):
        sels = self.view.sel()

        # First pass: expand every empty cursor to the word beneath it.
        expanded_any = False
        for sel in sels:
            if not sel.empty():
                continue
            word_region = self.view.word(sel)
            sels.add(word_region)
            self.view.show(word_region)
            expanded_any = True
        if expanded_any:
            return

        target = self.view.substr(self.view.word(sels[-1]))
        pattern = "\\b%s\\b" % (target)
        match = self.view.find(pattern, sels[-1].end())
        if not match:
            # Nothing ahead: wrap, resuming after the last match if known.
            wrap_start = self.last_selection.end() if self.last_selection else 0
            match = self.view.find(pattern, wrap_start)

        self.last_selection = match if match else None
        if match:
            sels.add(match)
            self.view.show(match)

    def description():
        return "Select Exact Match"
class SelectAllExactMatchesCommand(sublime_plugin.TextCommand):
    """Select every exact (whole-word) occurrence of the word at the last cursor."""

    def run(self, edit):
        sels = self.view.sel()
        last = sels[-1]
        if last.empty():
            # Expand an empty cursor to the word beneath it first.
            sels.add(self.view.word(last))
        target = self.view.substr(self.view.word(sels[-1]))
        sels.add_all(self.view.find_all("\\b%s\\b" % (target)))

    def description():
        return "Select All Exact Matches"
|
Fix the issue when use with multiple cursors and scroll the view when selected
|
Fix the issue when use with multiple cursors and scroll the view when selected
|
Python
|
mit
|
spywhere/SelectExact,spywhere/SelectExact
|
---
+++
@@ -6,10 +6,16 @@
def run(self, edit):
selections = self.view.sel()
- if selections[0].empty():
- selections.add(self.view.word(selections[0]))
+ words_selection = False
+ for selection in selections:
+ if selection.empty():
+ words_selection = True
+ region = self.view.word(selection)
+ selections.add(region)
+ self.view.show(region)
+ if words_selection:
return
- word = self.view.substr(self.view.word(selections[0]))
+ word = self.view.substr(self.view.word(selections[-1]))
pattern = "\\b%s\\b" % (word)
region = self.view.find(pattern, selections[-1].end())
if not region:
@@ -23,6 +29,7 @@
self.last_selection = None
if region:
selections.add(region)
+ self.view.show(region)
def description():
return "Select Exact Match"
@@ -31,9 +38,9 @@
class SelectAllExactMatchesCommand(sublime_plugin.TextCommand):
def run(self, edit):
selections = self.view.sel()
- if selections[0].empty():
- selections.add(self.view.word(selections[0]))
- word = self.view.substr(self.view.word(selections[0]))
+ if selections[-1].empty():
+ selections.add(self.view.word(selections[-1]))
+ word = self.view.substr(self.view.word(selections[-1]))
pattern = "\\b%s\\b" % (word)
selections.add_all(self.view.find_all(pattern))
|
082897d8216f3ec2feca8af4afbc0be7401956d5
|
systemofrecord/__init__.py
|
systemofrecord/__init__.py
|
import os
import logging
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from raven.contrib.flask import Sentry
app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
db = SQLAlchemy(app)
def configure_logging(obj):
logger = logging.getLogger(obj.__class__.__name__)
logger.addHandler(logging.StreamHandler())
if app.config['DEBUG']:
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
return logger
# Sentry exception reporting
if 'SENTRY_DSN' in os.environ:
sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])
if not app.debug:
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
app.logger.info("\nConfiguration\n%s\n" % app.config)
def configure_health():
from systemofrecord.health import Health
from systemofrecord.repository import blockchain_object_repository
from systemofrecord.services import ingest_queue, chain_queue
Health(app,
checks=[blockchain_object_repository.health,
ingest_queue.health,
chain_queue.health
])
configure_health()
|
import os
import logging

from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from raven.contrib.flask import Sentry

app = Flask(__name__)
app.config.from_object(os.environ.get('SETTINGS'))
db = SQLAlchemy(app)


def configure_logging(obj):
    """Return a stream-logging logger named after obj's class.

    The level tracks the app's DEBUG flag: DEBUG when debugging,
    INFO otherwise.
    """
    logger = logging.getLogger(obj.__class__.__name__)
    logger.addHandler(logging.StreamHandler())
    logger.setLevel(logging.DEBUG if app.config['DEBUG'] else logging.INFO)
    return logger


# Sentry exception reporting is enabled only when a DSN is configured.
if 'SENTRY_DSN' in os.environ:
    sentry = Sentry(app, dsn=os.environ['SENTRY_DSN'])

if not app.debug:
    app.logger.addHandler(logging.StreamHandler())
    app.logger.setLevel(logging.INFO)

app.logger.debug("\nConfiguration\n%s\n" % app.config)


def configure_health():
    """Register the health endpoint with repository and queue checks."""
    # Imported lazily so the health machinery sees a fully initialised app/db.
    from systemofrecord.health import Health
    from systemofrecord.repository import blockchain_object_repository
    from systemofrecord.services import ingest_queue, chain_queue
    Health(app,
           checks=[blockchain_object_repository.health,
                   ingest_queue.health,
                   chain_queue.health])


configure_health()
|
Set config logging in init to debug
|
Set config logging in init to debug
|
Python
|
mit
|
LandRegistry/system-of-record-alpha,LandRegistry/system-of-record-alpha,LandRegistry/system-of-record-alpha
|
---
+++
@@ -28,7 +28,7 @@
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.INFO)
-app.logger.info("\nConfiguration\n%s\n" % app.config)
+app.logger.debug("\nConfiguration\n%s\n" % app.config)
def configure_health():
|
8e45eb77394ad47579f5726e8f2e63794b8e10c5
|
farnsworth/wsgi.py
|
farnsworth/wsgi.py
|
"""
WSGI config for farnsworth project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "farnsworth.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
"""
WSGI config for farnsworth project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys

# Make the project root (one directory above this file) importable even when
# the WSGI server does not set WSGIPythonPath.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

# Defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process; use mod_wsgi daemon
# mode with one daemon per site, or assign os.environ["DJANGO_SETTINGS_MODULE"]
# unconditionally instead.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "farnsworth.settings")

# This application object is what any WSGI server (including Django's dev
# server) is configured to serve via the WSGI_APPLICATION setting.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
Fix python-path when WSGIPythonPath is not defined
|
Fix python-path when WSGIPythonPath is not defined
|
Python
|
bsd-2-clause
|
knagra/farnsworth,knagra/farnsworth,knagra/farnsworth,knagra/farnsworth
|
---
+++
@@ -14,6 +14,8 @@
"""
import os
+import sys
+sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
|
b33222fd9d16efa88864d0c1f28cce9d0a8c3f68
|
fastentrypoints.py
|
fastentrypoints.py
|
'''
Monkey patch setuptools to write faster console_scripts with this format:
from mymodule import entry_function
entry_function()
This is better.
'''
from setuptools.command import easy_install
@classmethod
def get_args(cls, dist, header=None):
"""
Yield write_script() argument tuples for a distribution's
console_scripts and gui_scripts entry points.
"""
template = 'import sys\nfrom {0} import {1}\nsys.exit({1}())'
if header is None:
header = cls.get_header()
spec = str(dist.as_requirement())
for type_ in 'console', 'gui':
group = type_ + '_scripts'
for name, ep in dist.get_entry_map(group).items():
cls._ensure_safe_name(name)
script_text = template.format(
ep.module_name, ep.attrs[0])
args = cls._get_script_args(type_, name, header, script_text)
for res in args:
yield res
easy_install.ScriptWriter.get_args = get_args
def main():
import shutil
import sys
dests = sys.argv[1:] or ['.']
print(__name__)
for dst in dests:
shutil.copy(__file__, dst)
with open(dst + '/MANIFEST.in', 'a') as manifest:
manifest.write('\ninclude fastentrypoints.py')
|
'''
Monkey patch setuptools to write faster console_scripts with this format:
from mymodule import entry_function
entry_function()
This is better.
'''
from setuptools.command import easy_install
@classmethod
def get_args(cls, dist, header=None):
    """
    Yield write_script() argument tuples for a distribution's
    console_scripts and gui_scripts entry points.
    """
    # Import the entry point's module directly instead of going through
    # pkg_resources — this is what makes the generated scripts start fast.
    template = 'import sys\nfrom {0} import {1}\nsys.exit({1}())'
    if header is None:
        header = cls.get_header()
    spec = str(dist.as_requirement())  # kept for parity with setuptools; unused below
    for type_ in ('console', 'gui'):
        group = type_ + '_scripts'
        for name, ep in dist.get_entry_map(group).items():
            cls._ensure_safe_name(name)
            script_text = template.format(ep.module_name, ep.attrs[0])
            for res in cls._get_script_args(type_, name, header, script_text):
                yield res


# Monkey-patch setuptools so every generated console script uses the
# lightweight template above.
easy_install.ScriptWriter.get_args = get_args
def main():
    """Copy this module into each destination and register it in MANIFEST.in.

    Destinations come from sys.argv[1:] (default: the current directory).
    MANIFEST.in is created when missing ('a' mode) and appended otherwise.
    """
    import os
    import re
    import shutil
    import sys

    dests = sys.argv[1:] or ['.']
    # __file__ may point at the byte-compiled .pyc; always copy the .py source.
    # Raw string: '\.pyc$' as a plain literal is an invalid escape sequence.
    filename = re.sub(r'\.pyc$', '.py', __file__)
    for dst in dests:
        shutil.copy(filename, dst)
        # os.path.join instead of string concatenation keeps this portable.
        with open(os.path.join(dst, 'MANIFEST.in'), 'a') as manifest:
            manifest.write('\ninclude fastentrypoints.py')
|
Make sure that .py file is used, even if .pyc got executed
|
Make sure that .py file is used, even if .pyc got executed
If python already byte-compiled the source code to .pyc file,
the __file__ points to .pyc, rather than to .py, which breaks the
copying mechanism.
Use regex substitution to make sure we're always copying the original
source file.
|
Python
|
bsd-2-clause
|
ninjaaron/fast-entry_points
|
---
+++
@@ -34,11 +34,12 @@
def main():
+ import re
import shutil
import sys
dests = sys.argv[1:] or ['.']
- print(__name__)
+ filename = re.sub('\.pyc$', '.py', __file__)
for dst in dests:
- shutil.copy(__file__, dst)
+ shutil.copy(filename, dst)
with open(dst + '/MANIFEST.in', 'a') as manifest:
manifest.write('\ninclude fastentrypoints.py')
|
a116c3eae892a73b11372225a9bdf0194db75598
|
glanerbeard/web.py
|
glanerbeard/web.py
|
import logging
from flask import (
Flask,
render_template,
abort
)
from glanerbeard.server import Server
app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')
numeric_level = getattr(logging, app.config['LOGLEVEL'].upper(), None)
if not isinstance(numeric_level, int):
raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)
servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
return str(shows)
if __name__ == '__main__':
app.debug = True
app.run()
|
import logging

from flask import (
    Flask,
    render_template,
    abort
)

from glanerbeard.server import Server

app = Flask(__name__)
app.config.from_object('glanerbeard.default_settings')
app.config.from_envvar('GLANERBEARD_SETTINGS')

# Bug fix: the error message referenced an unbound name `loglevel`, so an
# invalid level raised NameError instead of the intended ValueError.
loglevel = app.config['LOGLEVEL']
numeric_level = getattr(logging, loglevel.upper(), None)
if not isinstance(numeric_level, int):
    raise ValueError('Invalid log level: %s' % loglevel)
logging.basicConfig(level=numeric_level)
log = logging.getLogger(__name__)

servers = Server.createFromConfig(app.config['SERVERS'], app.config['API_KEYS'])


@app.route('/')
def index():
    """Render every configured server's show list via the json.html template."""
    shows = [server.getShows() for server in servers]
    return render_template('json.html', json=shows)
|
Use a template to render json.
|
Use a template to render json.
|
Python
|
apache-2.0
|
daenney/glanerbeard
|
---
+++
@@ -23,9 +23,4 @@
@app.route('/')
def index():
shows = [server.getShows() for server in servers]
- return str(shows)
-
-
-if __name__ == '__main__':
- app.debug = True
- app.run()
+ return render_template('json.html', json=shows)
|
3551020db091380e24fd64b9553e00c1f92600e7
|
glitch/__main__.py
|
glitch/__main__.py
|
from . import utils
# TODO: Override with port=NNNN if specified by environment
# Note that these functions lazily import their corresponding modules,
# otherwise package startup would take three parts of forever.
@utils.cmdline
def renderer(*, gain:"g"=0.0):
"""Invoke the infinite renderer
gain: dB gain (positive or negative) for volume adjustment
"""
from . import renderer
renderer.run(gain=gain) # doesn't return
@utils.cmdline
def major_glitch(*, dev=False):
"""Rebuild the Major Glitch"""
utils.enable_timer()
from . import renderer
renderer.major_glitch(profile=dev)
@utils.cmdline
def audition(id1, id2, fn):
"""Audition a transition
id1: ID of earlier track (will render last 10s)
id2: ID of later track (will render first 10s)
fn: File name to save into
"""
from . import renderer
renderer.audition(id1, id2, fn)
@utils.cmdline
def main(*, dev=False):
"""Start the main server (debug mode - production uses gunicorn)"""
from . import server
server.run(disable_logins=dev) # doesn't return
utils.main()
|
from . import utils
# TODO: Override with port=NNNN if specified by environment

from . import database  # Let the database functions register themselves

# These commands lazily import their corresponding modules, otherwise
# package startup would take three parts of forever.


@utils.cmdline
def renderer(*, gain: "g" = 0.0):
    """Invoke the infinite renderer

    gain: dB gain (positive or negative) for volume adjustment
    """
    from . import renderer
    renderer.run(gain=gain)  # doesn't return


@utils.cmdline
def major_glitch(*, dev=False):
    """Rebuild the Major Glitch"""
    utils.enable_timer()
    from . import renderer
    renderer.major_glitch(profile=dev)


@utils.cmdline
def audition(id1, id2, fn):
    """Audition a transition

    id1: ID of earlier track (will render last 10s)

    id2: ID of later track (will render first 10s)

    fn: File name to save into
    """
    from . import renderer
    renderer.audition(id1, id2, fn)


@utils.cmdline
def main(*, dev=False):
    """Start the main server (debug mode - production uses gunicorn)"""
    from . import server
    server.run(disable_logins=dev)  # doesn't return


utils.main()
|
Load up database functions into clize
|
Load up database functions into clize
|
Python
|
artistic-2.0
|
MikeiLL/appension,MikeiLL/appension,MikeiLL/appension,MikeiLL/appension
|
---
+++
@@ -1,6 +1,8 @@
from . import utils
# TODO: Override with port=NNNN if specified by environment
+
+from . import database # Let the database functions register themselves
# Note that these functions lazily import their corresponding modules,
# otherwise package startup would take three parts of forever.
|
ad0859f2e7b6f659fe964f786277ea2ad3fdf787
|
src/listener.py
|
src/listener.py
|
# -*- coding: utf-8 -*-
import logging
import socket
import threading
from connection import Connection
import shared
class Listener(threading.Thread):
def __init__(self, host, port, family=socket.AF_INET):
super().__init__(name='Listener')
self.host = host
self.port = port
self.family = family
self.s = socket.socket(self.family, socket.SOCK_STREAM)
self.s.bind((self.host, self.port))
def run(self):
self.s.listen(1)
self.s.settimeout(1)
while True:
try:
conn, addr = self.s.accept()
logging.info('Incoming connection from: {}:{}'.format(addr[0], addr[1]))
with shared.connections_lock:
c = Connection(addr[0], addr[1], conn)
c.start()
shared.connections.add(c)
except socket.timeout:
pass
|
# -*- coding: utf-8 -*-
import logging
import socket
import threading
from connection import Connection
import shared
class Listener(threading.Thread):
    """Accepts inbound TCP connections and spawns a Connection for each one."""

    def __init__(self, host, port, family=socket.AF_INET):
        super().__init__(name='Listener')
        self.host = host
        self.port = port
        self.family = family
        self.s = socket.socket(self.family, socket.SOCK_STREAM)
        # Allow quick restarts without waiting out TIME_WAIT on the port.
        self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.s.bind((self.host, self.port))

    def run(self):
        self.s.listen(1)
        # A short accept timeout keeps the loop from blocking forever.
        self.s.settimeout(1)
        while True:
            try:
                conn, addr = self.s.accept()
                logging.info('Incoming connection from: {}:{}'.format(addr[0], addr[1]))
                with shared.connections_lock:
                    connection_thread = Connection(addr[0], addr[1], conn)
                    connection_thread.start()
                    shared.connections.add(connection_thread)
            except socket.timeout:
                # No client arrived within the timeout window; try again.
                pass
|
Add SO_REUSEADDR to socket options
|
Add SO_REUSEADDR to socket options
|
Python
|
mit
|
TheKysek/MiNode,TheKysek/MiNode
|
---
+++
@@ -14,6 +14,7 @@
self.port = port
self.family = family
self.s = socket.socket(self.family, socket.SOCK_STREAM)
+ self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind((self.host, self.port))
def run(self):
|
03b07ca359c218b10837c2f1cdf4027474fdd856
|
windberg_register/admin.py
|
windberg_register/admin.py
|
from windberg_register import models
from django.contrib import admin
class StarterAdmin(admin.ModelAdmin):
list_display = ("name", "given", "age_group_short", "club_name", "email", "run_list", "comment")
list_per_page = 1000
def club_name(self, obj):
return obj.club.name
club_name.short_description = u"Verein"
def age_group_short(self, obj):
return obj.actual_age_group().short
age_group_short.short_description = u"gemeldete Ak"
def run_list(self, obj):
return u"; ".join(r.name for r in obj.runs.all())
run_list.short_description = u"gemeldete Wertungen"
admin.site.register(models.AgeGroup)
admin.site.register(models.Club)
admin.site.register(models.Run)
admin.site.register(models.Start)
admin.site.register(models.Starter, StarterAdmin)
admin.site.register(models.Version)
|
import codecs
from collections import defaultdict
from django.http import HttpResponse
import unicodecsv
from windberg_register import models
from django.contrib import admin
class StarterAdmin(admin.ModelAdmin):
    """Admin list view for starters with a CSV export action grouped by run."""

    list_display = ("name", "given", "age_group_short", "club_name", "email",
                    "run_list", "comment")
    list_per_page = 1000
    actions = ['_make_csv_list']

    def club_name(self, obj):
        return obj.club.name
    club_name.short_description = u"Verein"

    def age_group_short(self, obj):
        return obj.actual_age_group().short
    age_group_short.short_description = u"gemeldete Ak"

    def run_list(self, obj):
        return u"; ".join(r.name for r in obj.runs.all())
    run_list.short_description = u"gemeldete Wertungen"

    def _make_csv_list(self, request, queryset):
        # Group the selected starters under every run they are registered for.
        starters_by_run = defaultdict(list)
        for starter in queryset:
            for run in starter.runs.all():
                starters_by_run[run.name].append(starter)

        response = HttpResponse(content_type='text/csv')
        response['Content-Disposition'] = 'attachment; filename="meldungen.csv"'
        writer = unicodecsv.writer(response)

        # One section per run: run name header, one row per starter, blank row.
        for run_name in starters_by_run:
            writer.writerow([run_name])
            for starter in starters_by_run[run_name]:
                writer.writerow(["", "", "", starter.name, starter.given,
                                 starter.birth.year, starter.club.name,
                                 starter.actual_age_group().short])
            writer.writerow([""])
        return response
    _make_csv_list.short_description = "export CSV"
# Plain registrations; Starter gets the customised admin defined above.
for _model in (models.AgeGroup, models.Club, models.Run, models.Start):
    admin.site.register(_model)
admin.site.register(models.Starter, StarterAdmin)
admin.site.register(models.Version)
|
Add csv export feature for appointments
|
Add csv export feature for appointments
|
Python
|
bsd-3-clause
|
janLo/Windberg-web,janLo/Windberg-web
|
---
+++
@@ -1,3 +1,7 @@
+import codecs
+from collections import defaultdict
+from django.http import HttpResponse
+import unicodecsv
from windberg_register import models
from django.contrib import admin
@@ -5,6 +9,7 @@
class StarterAdmin(admin.ModelAdmin):
list_display = ("name", "given", "age_group_short", "club_name", "email", "run_list", "comment")
list_per_page = 1000
+ actions = ['_make_csv_list']
def club_name(self, obj):
return obj.club.name
@@ -15,9 +20,28 @@
age_group_short.short_description = u"gemeldete Ak"
def run_list(self, obj):
- return u"; ".join(r.name for r in obj.runs.all())
+ return u"; ".join(r.name for r in obj.runs.all())
run_list.short_description = u"gemeldete Wertungen"
+ def _make_csv_list(self, request, queryset):
+ run_dict = defaultdict(list)
+ for starter in queryset:
+ for run in starter.runs.all():
+ run_dict[run.name].append(starter)
+
+ response = HttpResponse(content_type='text/csv')
+ response['Content-Disposition'] = 'attachment; filename="meldungen.csv"'
+
+ writer = unicodecsv.writer(response)
+
+ for collected_run in run_dict:
+ writer.writerow([collected_run])
+ for starter in run_dict[collected_run]:
+ writer.writerow(["", "", "", starter.name, starter.given, starter.birth.year, starter.club.name,
+ starter.actual_age_group().short])
+ writer.writerow([""])
+ return response
+ _make_csv_list.short_description = "export CSV"
admin.site.register(models.AgeGroup)
|
f050d47ae8f835c4da7cdb45e217be77f42f01f5
|
fabfile.py
|
fabfile.py
|
from fabric.api import execute, local, settings, task
@task
def preprocess_header():
local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')
@task
def docs():
local('make -C docs/ html')
@task
def autodocs():
auto(docs)
@task
def test():
local('nosetests')
@task
def autotest():
auto(test)
def auto(task):
while True:
local('clear')
with settings(warn_only=True):
execute(task)
local(
'inotifywait -q -e create -e modify -e delete '
'--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
@task
def update_authors():
# Keep authors in the order of appearance and use awk to filter out dupes
local(
"git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
|
from fabric.api import execute, local, settings, task
@task
def preprocess_header():
    """Run the C preprocessor over the libspotify header."""
    local('cpp -nostdinc spotify/api.h > spotify/api.processed.h || true')


@task
def docs():
    """Build the Sphinx HTML documentation."""
    local('make -C docs/ html')


@task
def autodocs():
    """Rebuild the docs whenever a watched file changes."""
    auto(docs)


@task
def test():
    """Run the test suite once."""
    local('nosetests')


@task
def autotest():
    """Re-run the tests whenever a watched file changes."""
    auto(test)


def auto(fabric_task):
    """Loop forever: clear the screen, run the task, block until files change."""
    while True:
        local('clear')
        with settings(warn_only=True):
            execute(fabric_task)
        local(
            'inotifywait -q -e create -e modify -e delete '
            '--exclude ".*\.(pyc|sw.)" -r docs/ spotify/ tests/')


@task
def update_authors():
    # Keep authors in the order of appearance and use awk to filter out dupes
    local(
        "git log --format='- %aN <%aE>' --reverse | awk '!x[$0]++' > AUTHORS")
|
Watch the docs/ dir for changes
|
fab: Watch the docs/ dir for changes
|
Python
|
apache-2.0
|
felix1m/pyspotify,jodal/pyspotify,kotamat/pyspotify,kotamat/pyspotify,jodal/pyspotify,felix1m/pyspotify,kotamat/pyspotify,mopidy/pyspotify,jodal/pyspotify,mopidy/pyspotify,felix1m/pyspotify
|
---
+++
@@ -33,7 +33,7 @@
execute(task)
local(
'inotifywait -q -e create -e modify -e delete '
- '--exclude ".*\.(pyc|sw.)" -r spotify/ tests/')
+ '--exclude ".*\.(pyc|sw.)" -r docs/ spotify/ tests/')
@task
|
75dfc329430732159c6fd8735898922ee4d86a86
|
basic/events/urls.py
|
basic/events/urls.py
|
from django.conf.urls.defaults import *
urlpatterns = patterns('basic.events.views',
url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/(?P<id>\d)/$',
view='event_detail',
name='event_detail'
),
url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/$',
view='event_archive_day',
name='event_archive_day'
),
url(r'^(?P<year>\d{4})/(?P<month>\w{3})/$',
view='event_archive_month',
name='event_archive_month'
),
url(r'^(?P<year>\d{4})/$',
view='event_archive_year',
name='event_archive_year'
),
url(r'^$',
view='event_list',
name='event_index'
),
)
|
from django.conf.urls.defaults import *

# Date-based archive hierarchy for events. The detail URL carries both a slug
# and a numeric EventTime id (\d+ — one or more digits), the archives narrow
# from year to month to day, and the bare prefix lists everything.
urlpatterns = patterns('basic.events.views',
    url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/(?P<id>\d+)/$',
        view='event_detail',
        name='event_detail'),
    url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/$',
        view='event_archive_day',
        name='event_archive_day'),
    url(r'^(?P<year>\d{4})/(?P<month>\w{3})/$',
        view='event_archive_month',
        name='event_archive_month'),
    url(r'^(?P<year>\d{4})/$',
        view='event_archive_year',
        name='event_archive_year'),
    url(r'^$',
        view='event_list',
        name='event_index'),
)
|
Support more than one digit for EventTime IDs.
|
Support more than one digit for EventTime IDs.
|
Python
|
bsd-3-clause
|
sedden/django-basic-apps,sedden/django-basic-apps
|
---
+++
@@ -2,7 +2,7 @@
urlpatterns = patterns('basic.events.views',
- url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/(?P<id>\d)/$',
+ url(r'^(?P<year>\d{4})/(?P<month>\w{3})/(?P<day>\d{1,2})/(?P<slug>[-\w]+)/(?P<id>\d+)/$',
view='event_detail',
name='event_detail'
),
|
6f770e3da8dda9bc91300e323d386f6a6863c86e
|
testing/test-cases/selenium-tests/pointClustering/testPointCluster.py
|
testing/test-cases/selenium-tests/pointClustering/testPointCluster.py
|
#!/usr/bin/env python
from selenium_test import FirefoxTest, ChromeTest,\
setUpModule, tearDownModule
class glPointsBase(object):
testCase = ('pointClustering',)
testRevision = 4
def loadPage(self):
self.resizeWindow(640, 480)
self.loadURL('pointClustering/index.html')
self.wait()
self.resizeWindow(640, 480)
def testClustering0(self):
self.loadPage()
self.screenshotTest('zoom0')
def testClustering2(self):
self.loadPage()
self.runScript(
'myMap.zoom(5); myMap.center({x: -99, y: 40});'
)
self.screenshotTest('zoom2')
class FirefoxOSM(glPointsBase, FirefoxTest):
testCase = glPointsBase.testCase + ('firefox',)
class ChromeOSM(glPointsBase, ChromeTest):
testCase = glPointsBase.testCase + ('chrome',)
if __name__ == '__main__':
import unittest
unittest.main()
|
#!/usr/bin/env python
from time import sleep
from selenium_test import FirefoxTest, ChromeTest,\
setUpModule, tearDownModule
class glPointsBase(object):
    """Shared point-clustering screenshot tests; browser mixins supply the driver."""

    testCase = ('pointClustering',)
    testRevision = 4

    def loadPage(self):
        self.resizeWindow(640, 480)
        self.loadURL('pointClustering/index.html')
        self.wait()
        self.resizeWindow(640, 480)
        # Give the clustering/rendering time to settle before screenshotting.
        sleep(5)

    def testClustering0(self):
        self.loadPage()
        self.screenshotTest('zoom0')

    def testClustering2(self):
        self.loadPage()
        self.runScript(
            'myMap.zoom(5); myMap.center({x: -99, y: 40});'
        )
        self.screenshotTest('zoom2')


class FirefoxOSM(glPointsBase, FirefoxTest):
    testCase = glPointsBase.testCase + ('firefox',)


class ChromeOSM(glPointsBase, ChromeTest):
    testCase = glPointsBase.testCase + ('chrome',)


if __name__ == '__main__':
    import unittest
    unittest.main()
|
Add an explicit sleep in pointClustering test
|
Add an explicit sleep in pointClustering test
|
Python
|
apache-2.0
|
OpenGeoscience/geojs,OpenGeoscience/geojs,Kitware/geojs,OpenGeoscience/geojs,Kitware/geojs,Kitware/geojs
|
---
+++
@@ -1,5 +1,6 @@
#!/usr/bin/env python
+from time import sleep
from selenium_test import FirefoxTest, ChromeTest,\
setUpModule, tearDownModule
@@ -13,6 +14,7 @@
self.loadURL('pointClustering/index.html')
self.wait()
self.resizeWindow(640, 480)
+ sleep(5)
def testClustering0(self):
self.loadPage()
|
7fbc356ec6896e441f2423bd3168ff231b4a8bb2
|
roles/openshift_hosted/filter_plugins/filters.py
|
roles/openshift_hosted/filter_plugins/filters.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Custom filters for use in openshift_hosted
'''
class FilterModule(object):
''' Custom ansible filters for use by openshift_hosted role'''
@staticmethod
def get_router_replicas(replicas=None, router_nodes=None):
''' This function will return the number of replicas
based on the results from the defined
openshift.hosted.router.replicas OR
the query from oc_obj on openshift nodes with a selector OR
default to 1
'''
# We always use what they've specified if they've specified a value
if replicas is not None:
return replicas
if (isinstance(router_nodes, dict) and
'results' in router_nodes and
'results' in router_nodes['results'] and
'items' in router_nodes['results']['results']):
return len(router_nodes['results']['results'][0]['items'])
return 1
def filters(self):
''' returns a mapping of filters to methods '''
return {'get_router_replicas': self.get_router_replicas}
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
Custom filters for use in openshift_hosted
'''
class FilterModule(object):
    ''' Custom ansible filters for use by openshift_hosted role'''

    @staticmethod
    def get_router_replicas(replicas=None, router_nodes=None):
        '''Return the router replica count.

        Precedence: an explicitly configured replica count, then the number
        of nodes matched by the oc_obj node query, then a default of 1.
        '''
        # An explicit setting always wins.
        if replicas is not None:
            return replicas

        count = 1
        # Guarded step by step because router_nodes comes from an oc_obj
        # lookup and may be missing or oddly shaped.
        if (isinstance(router_nodes, dict)
                and 'results' in router_nodes
                and 'results' in router_nodes['results']
                and isinstance(router_nodes['results']['results'], list)
                and router_nodes['results']['results']
                and 'items' in router_nodes['results']['results'][0]):
            matched = router_nodes['results']['results'][0]['items']
            if matched:
                count = len(matched)
        return count

    def filters(self):
        ''' returns a mapping of filters to methods '''
        return {'get_router_replicas': self.get_router_replicas}
|
Fix get_router_replicas infrastructure node count.
|
Fix get_router_replicas infrastructure node count.
|
Python
|
apache-2.0
|
detiber/openshift-ansible,openshift/openshift-ansible,miminar/openshift-ansible,ttindell2/openshift-ansible,aveshagarwal/openshift-ansible,gburges/openshift-ansible,tagliateller/openshift-ansible,abutcher/openshift-ansible,mmahut/openshift-ansible,zhiwliu/openshift-ansible,zhiwliu/openshift-ansible,EricMountain-1A/openshift-ansible,miminar/openshift-ansible,akubicharm/openshift-ansible,liggitt/openshift-ansible,zhiwliu/openshift-ansible,nhr/openshift-ansible,thoraxe/openshift-ansible,rjhowe/openshift-ansible,sdodson/openshift-ansible,gburges/openshift-ansible,EricMountain-1A/openshift-ansible,miminar/openshift-ansible,ewolinetz/openshift-ansible,mmahut/openshift-ansible,ttindell2/openshift-ansible,aveshagarwal/openshift-ansible,miminar/openshift-ansible,aveshagarwal/openshift-ansible,markllama/openshift-ansible,akram/openshift-ansible,jwhonce/openshift-ansible,wbrefvem/openshift-ansible,ewolinetz/openshift-ansible,liggitt/openshift-ansible,markllama/openshift-ansible,maxamillion/openshift-ansible,abutcher/openshift-ansible,rjhowe/openshift-ansible,twiest/openshift-ansible,bparees/openshift-ansible,sosiouxme/openshift-ansible,tagliateller/openshift-ansible,wbrefvem/openshift-ansible,openshift/openshift-ansible,ewolinetz/openshift-ansible,anpingli/openshift-ansible,rjhowe/openshift-ansible,ewolinetz/openshift-ansible,twiest/openshift-ansible,bparees/openshift-ansible,wbrefvem/openshift-ansible,akubicharm/openshift-ansible,mmahut/openshift-ansible,wbrefvem/openshift-ansible,sdodson/openshift-ansible,rjhowe/openshift-ansible,liggitt/openshift-ansible,akubicharm/openshift-ansible,jwhonce/openshift-ansible,aveshagarwal/openshift-ansible,git001/openshift-ansible,miminar/openshift-ansible,DG-i/openshift-ansible,EricMountain-1A/openshift-ansible,kwoodson/openshift-ansible,rhdedgar/openshift-ansible,rhdedgar/openshift-ansible,markllama/openshift-ansible,mwoodson/openshift-ansible,sosiouxme/openshift-ansible,tagliateller/openshift-ansible,sdodson/openshift-ansible,sdodson/open
shift-ansible,zhiwliu/openshift-ansible,aveshagarwal/openshift-ansible,twiest/openshift-ansible,git001/openshift-ansible,ewolinetz/openshift-ansible,jwhonce/openshift-ansible,mmahut/openshift-ansible,maxamillion/openshift-ansible,DG-i/openshift-ansible,jwhonce/openshift-ansible,maxamillion/openshift-ansible,anpingli/openshift-ansible,detiber/openshift-ansible,DG-i/openshift-ansible,sosiouxme/openshift-ansible,liggitt/openshift-ansible,detiber/openshift-ansible,akram/openshift-ansible,EricMountain-1A/openshift-ansible,maxamillion/openshift-ansible,rjhowe/openshift-ansible,thoraxe/openshift-ansible,sdodson/openshift-ansible,markllama/openshift-ansible,detiber/openshift-ansible,thoraxe/openshift-ansible,akubicharm/openshift-ansible,thoraxe/openshift-ansible,git001/openshift-ansible,ttindell2/openshift-ansible,abutcher/openshift-ansible,sosiouxme/openshift-ansible,maxamillion/openshift-ansible,sosiouxme/openshift-ansible,mmahut/openshift-ansible,abutcher/openshift-ansible,tagliateller/openshift-ansible,abutcher/openshift-ansible,liggitt/openshift-ansible,nak3/openshift-ansible,markllama/openshift-ansible,nhr/openshift-ansible,wbrefvem/openshift-ansible,mwoodson/openshift-ansible,nak3/openshift-ansible,zhiwliu/openshift-ansible,jwhonce/openshift-ansible,twiest/openshift-ansible,akubicharm/openshift-ansible,detiber/openshift-ansible,EricMountain-1A/openshift-ansible,DG-i/openshift-ansible,kwoodson/openshift-ansible,ttindell2/openshift-ansible,ttindell2/openshift-ansible,nhr/openshift-ansible,twiest/openshift-ansible,git001/openshift-ansible,tagliateller/openshift-ansible
|
---
+++
@@ -21,14 +21,21 @@
if replicas is not None:
return replicas
+ replicas = 1
+
+ # Ignore boolean expression limit of 5.
+ # pylint: disable=too-many-boolean-expressions
if (isinstance(router_nodes, dict) and
'results' in router_nodes and
'results' in router_nodes['results'] and
- 'items' in router_nodes['results']['results']):
+ isinstance(router_nodes['results']['results'], list) and
+ len(router_nodes['results']['results']) > 0 and
+ 'items' in router_nodes['results']['results'][0]):
- return len(router_nodes['results']['results'][0]['items'])
+ if len(router_nodes['results']['results'][0]['items']) > 0:
+ replicas = len(router_nodes['results']['results'][0]['items'])
- return 1
+ return replicas
def filters(self):
''' returns a mapping of filters to methods '''
|
4733055d8eee5e0e3ca3bd47eaa5c776bb62c9a8
|
tests/test_geodataframe.py
|
tests/test_geodataframe.py
|
import unittest
import json
from geopandas import GeoDataFrame
class TestSeries(unittest.TestCase):
def setUp(self):
# Data from http://www.nyc.gov/html/dcp/download/bytes/nybb_13a.zip
# saved as geopandas/examples/nybb_13a.zip.
self.df = GeoDataFrame.from_file(
'/nybb_13a/nybb.shp', vfs='zip://examples/nybb_13a.zip')
def test_from_file_(self):
self.assertTrue('geometry' in self.df)
self.assertTrue(len(self.df) == 5)
def test_to_json(self):
text = self.df.to_json()
data = json.loads(text)
self.assertTrue(data['type'] == 'FeatureCollection')
self.assertTrue(len(data['features']) == 5)
|
import unittest
import json
import numpy as np
from geopandas import GeoDataFrame
class TestDataFrame(unittest.TestCase):
def setUp(self):
# Data from http://www.nyc.gov/html/dcp/download/bytes/nybb_13a.zip
# saved as geopandas/examples/nybb_13a.zip.
self.df = GeoDataFrame.from_file(
'/nybb_13a/nybb.shp', vfs='zip://examples/nybb_13a.zip')
def test_from_file_(self):
self.assertTrue('geometry' in self.df)
self.assertTrue(len(self.df) == 5)
self.assertTrue(np.alltrue(self.df['BoroName'].values == np.array(['Staten Island',
'Queens', 'Brooklyn', 'Manhattan', 'Bronx'])))
def test_to_json(self):
text = self.df.to_json()
data = json.loads(text)
self.assertTrue(data['type'] == 'FeatureCollection')
self.assertTrue(len(data['features']) == 5)
|
Add test for values of text columns in GeoDataFrame from file
|
Add test for values of text columns in GeoDataFrame from file
|
Python
|
bsd-3-clause
|
maxalbert/geopandas,fonnesbeck/geopandas,geopandas/geopandas,geopandas/geopandas,scw/geopandas,micahcochran/geopandas,jdmcbr/geopandas,koldunovn/geopandas,jwass/geopandas,snario/geopandas,IamJeffG/geopandas,ozak/geopandas,perrygeo/geopandas,jdmcbr/geopandas,ozak/geopandas,urschrei/geopandas,geopandas/geopandas,jorisvandenbossche/geopandas,micahcochran/geopandas,kwinkunks/geopandas,jorisvandenbossche/geopandas,jorisvandenbossche/geopandas,jwass/geopandas
|
---
+++
@@ -1,9 +1,10 @@
import unittest
import json
+import numpy as np
from geopandas import GeoDataFrame
-class TestSeries(unittest.TestCase):
+class TestDataFrame(unittest.TestCase):
def setUp(self):
# Data from http://www.nyc.gov/html/dcp/download/bytes/nybb_13a.zip
@@ -14,6 +15,8 @@
def test_from_file_(self):
self.assertTrue('geometry' in self.df)
self.assertTrue(len(self.df) == 5)
+ self.assertTrue(np.alltrue(self.df['BoroName'].values == np.array(['Staten Island',
+ 'Queens', 'Brooklyn', 'Manhattan', 'Bronx'])))
def test_to_json(self):
text = self.df.to_json()
|
a581d057366d8b4ae94754e18ef02e4ec59e3c05
|
gensysinfo.py
|
gensysinfo.py
|
#!/usr/bin/env python
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
load, _, _, = psutil.getloadavg()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(load / numcpus))
time.sleep(20)
|
#!/usr/bin/env python
import psutil
import os
import time
def create_bar(filled):
low = '.'
high = '|'
if filled > 1:
low = str(int(filled))
high = str(int(filled + 1))
filled = filled - int(filled)
filled = int(filled * 10)
if filled < 5:
color = "green"
elif filled < 8:
color = "yellow"
else:
color = "red"
bar = '#[fg=' + color + ']['
bar += high * filled
bar += low * (10 - filled)
bar += ']#[fg=default]'
return bar
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
cpuutilblock.write(create_bar(psutil.cpu_percent())
time.sleep(20)
|
Use cpu_percent instead of getloadavg
|
Use cpu_percent instead of getloadavg
This is functionally similar and available in
older versions of psutil
|
Python
|
mit
|
wilfriedvanasten/miscvar,wilfriedvanasten/miscvar,wilfriedvanasten/miscvar
|
---
+++
@@ -27,10 +27,9 @@
while True:
meminfo = psutil.virtual_memory()
numcpus = psutil.cpu_count()
- load, _, _, = psutil.getloadavg()
with open(os.path.expanduser("~/.memblock"), "w") as memblock:
memblock.write(create_bar((meminfo.total - meminfo.available) / meminfo.total))
with open(os.path.expanduser("~/.cpuutilblock"), "w") as cpuutilblock:
- cpuutilblock.write(create_bar(load / numcpus))
+ cpuutilblock.write(create_bar(psutil.cpu_percent())
time.sleep(20)
|
97d1dd6b14cff5196ccd2e2efad8a0aba5bf240b
|
tests/test_money.py
|
tests/test_money.py
|
from decimal import Decimal
from django.test import TestCase
from shop.money.money_maker import AbstractMoney, MoneyMaker
class AbstractMoneyTest(TestCase):
def test_is_abstract(self):
self.assertRaises(TypeError, lambda: AbstractMoney(1))
class MoneyMakerTest(TestCase):
def test_create_new_money_type_without_argumens(self):
Money = MoneyMaker()
money = Money()
self.assertTrue(money.is_nan())
def test_wrong_currency_raises_assertion_error(self):
# If we try to call a money class with a value that has a
# different currency than the class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
value._currency_code = 'USD'
self.assertRaises(AssertionError, lambda: Money(value))
def test_create_instance_from_decimal(self):
Money = MoneyMaker()
value = Decimal("1.2")
inst = Money(value)
self.assertEquals(inst, value)
|
# -*- coding: utf-8
from __future__ import unicode_literals
from decimal import Decimal
from django.test import TestCase
from shop.money.money_maker import AbstractMoney, MoneyMaker
class AbstractMoneyTest(TestCase):
def test_is_abstract(self):
self.assertRaises(TypeError, lambda: AbstractMoney(1))
class MoneyMakerTest(TestCase):
def test_create_new_money_type_without_argumens(self):
Money = MoneyMaker()
money = Money()
self.assertTrue(money.is_nan())
def test_wrong_currency_raises_assertion_error(self):
# If we try to call a money class with a value that has a
# different currency than the class, and the value is an
# instance of the money class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
value._currency_code = 'USD'
self.assertRaises(AssertionError, lambda: Money(value))
def test_create_instance_from_decimal(self):
Money = MoneyMaker()
value = Decimal("1.2")
inst = Money(value)
self.assertEquals(inst, value)
def test_unicode(self):
Money = MoneyMaker()
value = Money(1)
self.assertEqual(unicode(value), "€ 1.00")
|
Add a test for AbstractMoney.__unicode__
|
Add a test for AbstractMoney.__unicode__
|
Python
|
bsd-3-clause
|
nimbis/django-shop,jrief/django-shop,awesto/django-shop,rfleschenberg/django-shop,jrief/django-shop,nimbis/django-shop,jrief/django-shop,awesto/django-shop,khchine5/django-shop,rfleschenberg/django-shop,jrief/django-shop,divio/django-shop,divio/django-shop,rfleschenberg/django-shop,nimbis/django-shop,khchine5/django-shop,khchine5/django-shop,awesto/django-shop,rfleschenberg/django-shop,divio/django-shop,khchine5/django-shop,nimbis/django-shop
|
---
+++
@@ -1,3 +1,6 @@
+# -*- coding: utf-8
+from __future__ import unicode_literals
+
from decimal import Decimal
from django.test import TestCase
@@ -20,7 +23,8 @@
def test_wrong_currency_raises_assertion_error(self):
# If we try to call a money class with a value that has a
- # different currency than the class, there should be an
+ # different currency than the class, and the value is an
+ # instance of the money class, there should be an
# AssertionError.
Money = MoneyMaker(currency_code='EUR')
value = Money()
@@ -32,3 +36,8 @@
value = Decimal("1.2")
inst = Money(value)
self.assertEquals(inst, value)
+
+ def test_unicode(self):
+ Money = MoneyMaker()
+ value = Money(1)
+ self.assertEqual(unicode(value), "€ 1.00")
|
913d06c323f188d7647d342257ab2c0eb153d879
|
tests/test_scale.py
|
tests/test_scale.py
|
from hypothesis import assume, given, strategies as st
from pytest import raises # type: ignore
from ppb_vector import Vector
from utils import angle_isclose, isclose, lengths, vectors
@given(v=vectors(), length=st.floats(max_value=0))
def test_scale_negative_length(v: Vector, length: float):
"""Test that Vector.scale_to raises ValueError on negative lengths."""
assume(length < 0)
with raises(ValueError):
v.scale_to(length)
@given(x=vectors(), length=lengths())
def test_scale_to_length(x: Vector, length: float):
"""Test that the length of x.scale_to(length) is length.
Additionally, scale_to may raise ZeroDivisionError if the vector is null.
"""
try:
assert isclose(x.scale_to(length).length, length)
except ZeroDivisionError:
assert x == (0, 0)
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
assume(length > 0)
try:
assert angle_isclose(x.scale_to(length).angle(x), 0)
except ZeroDivisionError:
assert x == (0, 0)
|
from hypothesis import assume, given, strategies as st
from pytest import raises # type: ignore
from ppb_vector import Vector
from utils import angle_isclose, isclose, lengths, vectors
@given(v=vectors(), length=st.floats(max_value=0))
def test_scale_negative_length(v: Vector, length: float):
"""Test that Vector.scale_to raises ValueError on negative lengths."""
assume(length < 0)
with raises(ValueError):
v.scale_to(length)
@given(x=vectors(), length=lengths())
def test_scale_to_length(x: Vector, length: float):
"""Test that the length of x.scale_to(length) is length with x non-null."""
assume(x != (0, 0))
assert isclose(x.scale_to(length).length, length)
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
assume(length > 0 and x != (0, 0))
assert angle_isclose(x.scale_to(length).angle(x), 0)
|
Simplify length & alignment tests
|
tests/scale: Simplify length & alignment tests
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
---
+++
@@ -15,21 +15,13 @@
@given(x=vectors(), length=lengths())
def test_scale_to_length(x: Vector, length: float):
- """Test that the length of x.scale_to(length) is length.
-
- Additionally, scale_to may raise ZeroDivisionError if the vector is null.
- """
- try:
- assert isclose(x.scale_to(length).length, length)
- except ZeroDivisionError:
- assert x == (0, 0)
+ """Test that the length of x.scale_to(length) is length with x non-null."""
+ assume(x != (0, 0))
+ assert isclose(x.scale_to(length).length, length)
@given(x=vectors(), length=lengths())
def test_scale_aligned(x: Vector, length: float):
"""Test that x.scale_to(length) is aligned with x."""
- assume(length > 0)
- try:
- assert angle_isclose(x.scale_to(length).angle(x), 0)
- except ZeroDivisionError:
- assert x == (0, 0)
+ assume(length > 0 and x != (0, 0))
+ assert angle_isclose(x.scale_to(length).angle(x), 0)
|
b19568c85458ac04b902dc03010e2d50177477e1
|
tests/test_utils.py
|
tests/test_utils.py
|
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common testing utilities.
"""
__authors__ = [
'"Augie Fackler" <durin42@gmail.com>',
]
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
"""
def __init__(self, path=None):
self.REQUEST = self.GET = self.POST = {}
self.path = path
|
#!/usr/bin/python2.5
#
# Copyright 2008 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common testing utilities.
"""
__authors__ = [
'"Augie Fackler" <durin42@gmail.com>',
'"Sverre Rabbelier" <sverre@rabbelier.nl>',
]
from soc.modules import callback
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
Before using the object, start should be called, when done (and
before calling start on a new request), end should be called.
"""
def __init__(self, path=None):
"""Creates a new empty request object.
self.REQUEST, self.GET and self.POST are set to an empty
dictionary, and path to the value specified.
"""
self.REQUEST = {}
self.GET = {}
self.POST = {}
self.path = path
def start(self):
"""Readies the core for a new request.
"""
core = callback.getCore()
core.startNewRequest(self)
def end(self):
"""Finishes up the current request.
"""
core = callback.getCore()
core.endRequest(self, False)
|
Add a start and end method to MockRequest
|
Add a start and end method to MockRequest
|
Python
|
apache-2.0
|
MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging,MatthewWilkes/mw4068-packaging
|
---
+++
@@ -19,12 +19,42 @@
__authors__ = [
'"Augie Fackler" <durin42@gmail.com>',
+ '"Sverre Rabbelier" <sverre@rabbelier.nl>',
]
+
+
+from soc.modules import callback
class MockRequest(object):
"""Shared dummy request object to mock common aspects of a request.
+
+ Before using the object, start should be called, when done (and
+ before calling start on a new request), end should be called.
"""
+
def __init__(self, path=None):
- self.REQUEST = self.GET = self.POST = {}
+ """Creates a new empty request object.
+
+ self.REQUEST, self.GET and self.POST are set to an empty
+ dictionary, and path to the value specified.
+ """
+
+ self.REQUEST = {}
+ self.GET = {}
+ self.POST = {}
self.path = path
+
+ def start(self):
+ """Readies the core for a new request.
+ """
+
+ core = callback.getCore()
+ core.startNewRequest(self)
+
+ def end(self):
+ """Finishes up the current request.
+ """
+
+ core = callback.getCore()
+ core.endRequest(self, False)
|
2ed0f0e9f875722d2ae21d595701d37646b74885
|
tingbot/__init__.py
|
tingbot/__init__.py
|
from . import platform_specific, input
from .graphics import screen, Surface, Image
from .run_loop import main_run_loop, every
from .input import touch
from .button import press
from .web import webhook
platform_specific.fixup_env()
def run(loop=None):
if loop is not None:
every(seconds=1.0/30)(loop)
main_run_loop.add_after_action_callback(screen.update_if_needed)
main_run_loop.add_wait_callback(input.poll)
# in case screen updates happen in input.poll...
main_run_loop.add_wait_callback(screen.update_if_needed)
main_run_loop.run()
__all__ = ['run', 'screen', 'Surface', 'Image', 'every', 'touch', 'press', 'button', 'webhook']
__author__ = 'Joe Rickerby'
__email__ = 'joerick@mac.com'
__version__ = '0.3'
|
try:
import pygame
except ImportError:
print 'Failed to import pygame'
print '-----------------------'
print ''
print 'tingbot-python requires pygame. Please download and install pygame 1.9.1'
print 'or later from http://www.pygame.org/download.shtml'
print ''
print "If you're using a virtualenv, you should make the virtualenv with the "
print "--system-site-packages flag so the system-wide installation is still "
print "accessible."
print ''
print '-----------------------'
print ''
raise
from . import platform_specific, input
from .graphics import screen, Surface, Image
from .run_loop import main_run_loop, every
from .input import touch
from .button import press
from .web import webhook
platform_specific.fixup_env()
def run(loop=None):
if loop is not None:
every(seconds=1.0/30)(loop)
main_run_loop.add_after_action_callback(screen.update_if_needed)
main_run_loop.add_wait_callback(input.poll)
# in case screen updates happen in input.poll...
main_run_loop.add_wait_callback(screen.update_if_needed)
main_run_loop.run()
__all__ = ['run', 'screen', 'Surface', 'Image', 'every', 'touch', 'press', 'button', 'webhook']
__author__ = 'Joe Rickerby'
__email__ = 'joerick@mac.com'
__version__ = '0.3'
|
Add import-time check for pygame (since we can't install automatically)
|
Add import-time check for pygame (since we can't install automatically)
|
Python
|
bsd-2-clause
|
furbrain/tingbot-python
|
---
+++
@@ -1,3 +1,20 @@
+try:
+ import pygame
+except ImportError:
+ print 'Failed to import pygame'
+ print '-----------------------'
+ print ''
+ print 'tingbot-python requires pygame. Please download and install pygame 1.9.1'
+ print 'or later from http://www.pygame.org/download.shtml'
+ print ''
+ print "If you're using a virtualenv, you should make the virtualenv with the "
+ print "--system-site-packages flag so the system-wide installation is still "
+ print "accessible."
+ print ''
+ print '-----------------------'
+ print ''
+ raise
+
from . import platform_specific, input
from .graphics import screen, Surface, Image
|
8f96a89e14bfcb8ed66e0e276966df609b7651c1
|
barsystem/setup.py
|
barsystem/setup.py
|
from setuptools import setup, find_packages
setup(
name='barsystem',
version='1.0.0',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
entry_points={
'console_scripts': [
'barsystem-installer = barsystem.install:main'
]
},
install_requires=[
'django>=1.10,<=1.10.99',
'django-translatable',
'pytz',
'python-dateutil',
'Pillow',
'uwsgi',
'paho-mqtt'
],
license='MIT',
description='',
long_description='',
url='https://github.com/TkkrLab/barsystem',
author='Jasper Seidel',
author_email='code@jawsper.nl',
)
|
from setuptools import setup, find_packages
setup(
name='barsystem',
version='1.0.0',
packages=find_packages('src'),
package_dir={'': 'src'},
include_package_data=True,
entry_points={
'console_scripts': [
'barsystem-installer = barsystem.install:main'
]
},
install_requires=[
'django>=1.10,<=1.10.99',
'django-translatable',
'pytz',
'python-dateutil',
'Pillow',
],
extras_require={
'uwsgi': ['uwsgi'],
'mqtt': ['paho-mqtt'],
},
license='MIT',
description='',
long_description='',
url='https://github.com/TkkrLab/barsystem',
author='Jasper Seidel',
author_email='code@jawsper.nl',
)
|
Move some requirements to extras.
|
Move some requirements to extras.
|
Python
|
mit
|
TkkrLab/barsystem,TkkrLab/barsystem,TkkrLab/barsystem
|
---
+++
@@ -19,9 +19,11 @@
'pytz',
'python-dateutil',
'Pillow',
- 'uwsgi',
- 'paho-mqtt'
],
+ extras_require={
+ 'uwsgi': ['uwsgi'],
+ 'mqtt': ['paho-mqtt'],
+ },
license='MIT',
description='',
|
91c6c7b8e8077a185e8a62af0c3bcb74d4026e7c
|
tests/search.py
|
tests/search.py
|
import pycomicvine
import unittest
api_key = "476302e62d7e8f8f140182e36aebff2fe935514b"
class TestSearch(unittest.TestCase):
def test_search_resource_type(self):
search = pycomicvine.Search(
resources="volume",
query="Angel"
)
self.assertIsInstance(search[0], pycomicvine.Volume)
def test_search_id(self):
search = pycomicvine.Search(
query="The Walking Dead",
field_list=["id"]
)
self.assertNotEqual(len(search),0)
self.assertEqual(search[0].id, 18166)
|
import pycomicvine
import unittest
api_key = "476302e62d7e8f8f140182e36aebff2fe935514b"
class TestSearch(unittest.TestCase):
def test_search_resource_type(self):
search = pycomicvine.Search(
resources="volume",
query="Angel"
)
for v in search:
self.assertIsInstance(v, pycomicvine.Volume)
def test_search_id(self):
search = pycomicvine.Search(
query="The Walking Dead",
field_list=["id"]
)
self.assertNotEqual(len(search),0)
self.assertEqual(18166, [s.id for s in search])
|
Check every result in Search test
|
Check every result in Search test
|
Python
|
mit
|
authmillenon/pycomicvine
|
---
+++
@@ -9,7 +9,8 @@
resources="volume",
query="Angel"
)
- self.assertIsInstance(search[0], pycomicvine.Volume)
+ for v in search:
+ self.assertIsInstance(v, pycomicvine.Volume)
def test_search_id(self):
search = pycomicvine.Search(
@@ -17,5 +18,5 @@
field_list=["id"]
)
self.assertNotEqual(len(search),0)
- self.assertEqual(search[0].id, 18166)
+ self.assertEqual(18166, [s.id for s in search])
|
709d6c530296fe9e0b03ad5ed28facd7c69b93fa
|
importjson.py
|
importjson.py
|
import json
import pprint
import os
with open('json/test.json') as json_data:
d = json.load(json_data)
# print(d)
# pprint.pprint(d)
for stat_categories in d["divisionteamstandings"]["division"][0]["teamentry"][0]["stats"]:
pprint.pprint(stat_categories)
|
import json
import pprint
import os
#Open the JSON file that includes headers
with open('json/20160927-division-team-standings.json') as file:
alltext = file.readlines() #Put each line into a list
for lines in alltext:
if lines.startswith('{'):
rawdata = lines
data = json.loads(rawdata)
for stat_categories in data["divisionteamstandings"]["division"][0]["teamentry"][0]["stats"]:
pprint.pprint(stat_categories)
|
Create JSON importer from raw JSON file including web headers and pretty print all of the stat categories for one file
|
Create JSON importer from raw JSON file including web headers and pretty print all of the stat categories for one file
|
Python
|
mit
|
prcutler/nflpool,prcutler/nflpool
|
---
+++
@@ -2,10 +2,14 @@
import pprint
import os
+#Open the JSON file that includes headers
-with open('json/test.json') as json_data:
- d = json.load(json_data)
-# print(d)
-# pprint.pprint(d)
- for stat_categories in d["divisionteamstandings"]["division"][0]["teamentry"][0]["stats"]:
- pprint.pprint(stat_categories)
+with open('json/20160927-division-team-standings.json') as file:
+ alltext = file.readlines() #Put each line into a list
+
+for lines in alltext:
+ if lines.startswith('{'):
+ rawdata = lines
+ data = json.loads(rawdata)
+ for stat_categories in data["divisionteamstandings"]["division"][0]["teamentry"][0]["stats"]:
+ pprint.pprint(stat_categories)
|
7faa73b5046fb87099d955705c4f00c5240f3544
|
running.py
|
running.py
|
import tcxparser
from darksky import forecast
from configparser import ConfigParser
# Darksky weather API.
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')
tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
print(tcx.duration)
boston = forecast(darksky_key, 42.3601, -71.0589)
print(boston)
|
import tcxparser
from configparser import ConfigParser
from datetime import datetime
import urllib.request
import dateutil.parser
t = '1984-06-02T19:05:00.000Z'
# Darksky weather API
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')
tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
run_time = tcx.completed_at
def convert_time_to_unix(time):
parsed_time = dateutil.parser.parse(time)
time_in_unix = parsed_time.strftime('%s')
return time_in_unix
unix_run_time = convert_time_to_unix(run_time)
darksky_request = urllib.request.urlopen("https://api.darksky.net/forecast/" + darksky_key + "/42.3601,-71.0589," + unix_run_time + "?exclude=currently,flags").read()
print(darksky_request)
class getWeather:
def __init__(self, date, time):
self.date = date
self.time = time
def goodbye(self, date):
print("my name is " + date)
|
Call Darksky API with TCX run time Use simpler GET request to Darksky API rather than a third party Python wrapper
|
Call Darksky API with TCX run time
Use simpler GET request to Darksky API rather than a third party Python
wrapper
|
Python
|
mit
|
briansuhr/slowburn
|
---
+++
@@ -1,15 +1,33 @@
import tcxparser
-from darksky import forecast
from configparser import ConfigParser
+from datetime import datetime
+import urllib.request
+import dateutil.parser
-# Darksky weather API.
+t = '1984-06-02T19:05:00.000Z'
+# Darksky weather API
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')
tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
+run_time = tcx.completed_at
-print(tcx.duration)
-boston = forecast(darksky_key, 42.3601, -71.0589)
-print(boston)
+def convert_time_to_unix(time):
+ parsed_time = dateutil.parser.parse(time)
+ time_in_unix = parsed_time.strftime('%s')
+ return time_in_unix
+
+unix_run_time = convert_time_to_unix(run_time)
+darksky_request = urllib.request.urlopen("https://api.darksky.net/forecast/" + darksky_key + "/42.3601,-71.0589," + unix_run_time + "?exclude=currently,flags").read()
+print(darksky_request)
+
+
+class getWeather:
+ def __init__(self, date, time):
+ self.date = date
+ self.time = time
+
+ def goodbye(self, date):
+ print("my name is " + date)
|
19fb86f8b3a2307489f926d9d5d78bd84c6b05a1
|
Sketches/MH/TimerMixIn.py
|
Sketches/MH/TimerMixIn.py
|
#!/usr/bin/env python
from Axon.Component import component
from threading import Timer
class TimerMixIn(object):
def __init__(self, *argl, **argd):
super(TimerMixIn,self).__init__(*argl,**argd)
self.timer = None
self.timerSuccess = True
def startTimer(self, secs):
self.timer = Timer(secs, self.__handleTimerDone)
self.timerSuccess = False
self.timer.start()
def cancelTimer(self):
if self.timer is not None and self.timer:
self.timer.cancel()
self.timer = None
self.timerSuccess = False
def timerRunning(self):
return self.timer is not None
def timerWasCancelled(self):
return not self.timerSuccess
def __handleTimerDone(self):
self.scheduler.wakeThread(self)
self.timer = None
self.timerSuccess = True
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import ConsoleEchoer
class TestComponent(TimerMixIn,component):
def __init__(self):
super(TestComponent,self).__init__()
def main(self):
count = 0
while True:
self.startTimer(0.5)
while self.timerRunning():
self.pause()
yield 1
self.send(count, "outbox")
count=count+1
Pipeline(TestComponent(),ConsoleEchoer()).run()
|
#!/usr/bin/env python
from Axon.Component import component
from threading import Timer
class TimerMixIn(object):
def __init__(self, *argl, **argd):
super(TimerMixIn,self).__init__(*argl,**argd)
self.timer = None
self.timerSuccess = True
def startTimer(self, secs):
if self.timer is not None:
self.cancelTimer()
self.timer = Timer(secs, self.__handleTimerDone)
self.timerSuccess = False
self.timer.start()
def cancelTimer(self):
if self.timer is not None and self.timer:
self.timer.cancel()
self.timer = None
self.timerSuccess = False
def timerRunning(self):
return self.timer is not None
def timerWasCancelled(self):
return not self.timerSuccess
def __handleTimerDone(self):
self.scheduler.wakeThread(self)
self.timer = None
self.timerSuccess = True
if __name__ == "__main__":
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.Util.Console import ConsoleEchoer
class TestComponent(TimerMixIn,component):
def __init__(self):
super(TestComponent,self).__init__()
def main(self):
count = 0
while True:
self.startTimer(0.5)
while self.timerRunning():
self.pause()
yield 1
self.send(count, "outbox")
count=count+1
Pipeline(TestComponent(),ConsoleEchoer()).run()
|
Handle situation if timer is already running.
|
Handle situation if timer is already running.
|
Python
|
apache-2.0
|
sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia,sparkslabs/kamaelia
|
---
+++
@@ -11,6 +11,8 @@
self.timerSuccess = True
def startTimer(self, secs):
+ if self.timer is not None:
+ self.cancelTimer()
self.timer = Timer(secs, self.__handleTimerDone)
self.timerSuccess = False
self.timer.start()
|
2df886059a9edd8d75fdb255fc185c2f96a02c29
|
user/signals.py
|
user/signals.py
|
import re
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from user import tokens
from user.models import User
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
# MAke user organizer if fits regex
@receiver(post_save, sender=User)
def user_organizer(sender, instance, created, *args, **kwargs):
if not REGEX_PATTERN or not created:
return None
if re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
instance.save()
# Send user verification
@receiver(post_save, sender=User)
def user_verify_email(sender, instance, created, *args, **kwargs):
if created and not instance.email_verified:
msg = tokens.generate_verify_email(instance)
msg.send()
|
import re
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
from user import tokens
from user.models import User
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
DEV_EMAILS = getattr(settings, 'HACKATHON_DEV_EMAILS', None)
# Make user organizer or admin if fits regex
@receiver(post_save, sender=User)
def user_organizer(sender, instance, created, *args, **kwargs):
if not created:
return None
if REGEX_PATTERN and re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
instance.save()
if DEV_EMAILS and instance.email in DEV_EMAILS:
instance.is_admin = True
instance.save()
# Send user verification
@receiver(post_save, sender=User)
def user_verify_email(sender, instance, created, *args, **kwargs):
if created and not instance.email_verified:
msg = tokens.generate_verify_email(instance)
msg.send()
|
Make developers an admin on registration
|
Make developers an admin on registration
|
Python
|
mit
|
hackupc/backend,hackupc/backend,hackupc/backend,hackupc/backend
|
---
+++
@@ -8,15 +8,21 @@
from user.models import User
REGEX_PATTERN = getattr(settings, 'REGEX_HACKATHON_ORGANIZER_EMAIL', None)
+DEV_EMAILS = getattr(settings, 'HACKATHON_DEV_EMAILS', None)
-# MAke user organizer if fits regex
+# Make user organizer or admin if fits regex
@receiver(post_save, sender=User)
def user_organizer(sender, instance, created, *args, **kwargs):
- if not REGEX_PATTERN or not created:
+ if not created:
return None
- if re.match(REGEX_PATTERN, instance.email):
+
+ if REGEX_PATTERN and re.match(REGEX_PATTERN, instance.email):
instance.is_organizer = True
+ instance.save()
+
+ if DEV_EMAILS and instance.email in DEV_EMAILS:
+ instance.is_admin = True
instance.save()
|
87b3d17bcee42630ec502475e67d5f58cee4f577
|
cafe/utilities.py
|
cafe/utilities.py
|
from six import string_types
def listify(arg):
"""
Simple utility method to ensure an argument provided is a list. If the provider argument is not an instance of
`list`, then we return [arg], else arg is returned.
:type arg: list
:rtype: list
"""
if not isinstance(arg, list):
return [arg]
return arg
def is_str(arg):
"""
A py2/3 compatible 'is string' check.
:type arg:
:rtype:
"""
return isinstance(arg, string_types)
|
from os import getenv
from six import string_types
def listify(arg):
"""
Simple utility method to ensure an argument provided is a list. If the provider argument is not an instance of
`list`, then we return [arg], else arg is returned.
:type arg: list
:rtype: list
"""
if not isinstance(arg, list):
return [arg]
return arg
def is_str(arg):
"""
A py2/3 compatible 'is string' check.
:type arg:
:rtype:
"""
return isinstance(arg, string_types)
def resolve_setting(default, arg_value=None, env_var=None, config_value=None):
"""
Resolves a setting for a configuration option. The winning value is chosen from multiple methods of configuration,
in the following order of priority (top first):
- Explicitly passed argument
- Environment variable
- Configuration file entry
- Default
:param arg_value: Explicitly passed value
:param env_var: Environment variable name
:type env_var: string or None
:param config_value: Configuration entry
:param default: Default value to if there are no overriding options
:return: Configuration value
"""
if arg_value is not None:
return arg_value
else:
env_value = getenv(env_var)
if env_value is not None:
return env_value
else:
if config_value is not None:
return config_value
else:
return default
|
Add function to resolve settings from multiple sources
|
Add function to resolve settings from multiple sources
utilities.resolve_setting() takes values for a setting from multiple sources and picks the appropriate value in order of source priority.
|
Python
|
apache-2.0
|
abn/python-cafe
|
---
+++
@@ -1,3 +1,4 @@
+from os import getenv
from six import string_types
@@ -22,3 +23,32 @@
:rtype:
"""
return isinstance(arg, string_types)
+
+
+def resolve_setting(default, arg_value=None, env_var=None, config_value=None):
+ """
+ Resolves a setting for a configuration option. The winning value is chosen from multiple methods of configuration,
+ in the following order of priority (top first):
+ - Explicitly passed argument
+ - Environment variable
+ - Configuration file entry
+ - Default
+
+ :param arg_value: Explicitly passed value
+ :param env_var: Environment variable name
+ :type env_var: string or None
+ :param config_value: Configuration entry
+ :param default: Default value to if there are no overriding options
+ :return: Configuration value
+ """
+ if arg_value is not None:
+ return arg_value
+ else:
+ env_value = getenv(env_var)
+ if env_value is not None:
+ return env_value
+ else:
+ if config_value is not None:
+ return config_value
+ else:
+ return default
|
c11152dc83416efb33bd4c8286633a311430c0f6
|
mpsort/__init__.py
|
mpsort/__init__.py
|
from .binding import sort as _sort
import numpy
from numpy.lib.recfunctions import append_fields
try:
unicode = unicode
except NameError:
# 'unicode' is undefined, must be Python 3
str = str
unicode = str
bytes = bytes
basestring = (str,bytes)
else:
# 'unicode' exists, must be Python 2
str = str
unicode = unicode
bytes = str
basestring = basestring
def sort(source, orderby, out=None, comm=None):
key = orderby
if isinstance(key, basestring):
return _sort(source, key, out, comm=comm)
data1 = numpy.empty(
len(source), dtype=[
('data', (source.dtype, source.shape[1:])),
('index', (key.dtype, key.shape[1:]))])
data1['data'][...] = source
data1['index'][...] = key
_sort(data1, orderby='index', comm=comm)
if out is None:
out = source
out[...] = data1['data'][...]
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
from .binding import sort as _sort
import numpy
from numpy.lib.recfunctions import append_fields
try:
unicode = unicode
except NameError:
# 'unicode' is undefined, must be Python 3
str = str
unicode = str
bytes = bytes
basestring = (str,bytes)
else:
# 'unicode' exists, must be Python 2
str = str
unicode = unicode
bytes = str
basestring = basestring
def sort(source, orderby, out=None, comm=None):
key = orderby
if isinstance(key, basestring):
return _sort(source, key, out, comm=comm)
data1 = numpy.empty(
len(source), dtype=[
('data', (source.dtype, source.shape[1:])),
('index', (key.dtype, key.shape[1:]))])
data1['data'][...] = source
data1['index'][...] = key
if out is None:
out = source
_sort(data1, orderby='index', comm=comm)
out[...] = data1['data'][...]
else:
data2 = numpy.empty(
len(out), dtype=[
('data', (out.dtype, out.shape[1:])),
('index', (key.dtype, key.shape[1:]))])
_sort(data1, orderby='index', out=data2, comm=comm)
out[...] = data2['data'][...]
from numpy.testing import Tester
test = Tester().test
bench = Tester().bench
|
Fix out place mismatched out array size.
|
Fix out place mismatched out array size.
|
Python
|
bsd-2-clause
|
rainwoodman/MP-sort,rainwoodman/MP-sort,rainwoodman/MP-sort
|
---
+++
@@ -31,12 +31,17 @@
data1['data'][...] = source
data1['index'][...] = key
- _sort(data1, orderby='index', comm=comm)
-
if out is None:
out = source
-
- out[...] = data1['data'][...]
+ _sort(data1, orderby='index', comm=comm)
+ out[...] = data1['data'][...]
+ else:
+ data2 = numpy.empty(
+ len(out), dtype=[
+ ('data', (out.dtype, out.shape[1:])),
+ ('index', (key.dtype, key.shape[1:]))])
+ _sort(data1, orderby='index', out=data2, comm=comm)
+ out[...] = data2['data'][...]
from numpy.testing import Tester
|
95186f684328d5b84611f405d47d474c53cad619
|
cat.py
|
cat.py
|
import io
import aiohttp
from discord.ext import commands
import yaml
class Cat:
def __init__(self, bot):
self.bot = bot
with open('config.yaml') as file:
data = yaml.load(file)
self.key = data.get('cat_key', '')
self.url = 'http://thecatapi.com/api/images/get'
self.params = {'api_key': self.key,
'type': 'png,jpg'}
@commands.command()
async def cat(self, ctx):
s = self.bot.session
async with ctx.typing(), s.get(self.url, params=self.params) as resp:
image = io.BytesIO(await resp.content.read())
ext = resp.headers['Content-Type'].partition('/')[2]
await ctx.send(file=image, filename=f'{ctx.message.id}.{ext}')
def setup(bot):
bot.add_cog(Cat(bot))
|
import io
import aiohttp
import discord
from discord.ext import commands
from lxml import etree
import yaml
class Cat:
def __init__(self, bot):
self.bot = bot
with open('config.yaml') as file:
data = yaml.load(file)
self.key = data.get('cat_key', '')
self.url = 'http://thecatapi.com/api/images/get'
self.params = {'api_key': self.key,
'type': 'png,jpg',
'format': 'xml',
}
@commands.command()
async def cat(self, ctx):
session = self.bot.session
async with ctx.typing():
async with session.get(self.url, params=self.params) as resp:
root = etree.fromstring(await resp.text())
url = root.find('.//url').text
embed = discord.Embed()
embed.set_image(url=url)
await ctx.send(embed=embed)
def setup(bot):
bot.add_cog(Cat(bot))
|
Send image in embed because aiohttp doesn't know how to parse links
|
Send image in embed because aiohttp doesn't know how to parse links
|
Python
|
mit
|
BeatButton/beattie,BeatButton/beattie-bot
|
---
+++
@@ -1,7 +1,9 @@
import io
import aiohttp
+import discord
from discord.ext import commands
+from lxml import etree
import yaml
@@ -13,15 +15,21 @@
self.key = data.get('cat_key', '')
self.url = 'http://thecatapi.com/api/images/get'
self.params = {'api_key': self.key,
- 'type': 'png,jpg'}
+ 'type': 'png,jpg',
+ 'format': 'xml',
+ }
@commands.command()
async def cat(self, ctx):
- s = self.bot.session
- async with ctx.typing(), s.get(self.url, params=self.params) as resp:
- image = io.BytesIO(await resp.content.read())
- ext = resp.headers['Content-Type'].partition('/')[2]
- await ctx.send(file=image, filename=f'{ctx.message.id}.{ext}')
+ session = self.bot.session
+ async with ctx.typing():
+ async with session.get(self.url, params=self.params) as resp:
+ root = etree.fromstring(await resp.text())
+ url = root.find('.//url').text
+ embed = discord.Embed()
+ embed.set_image(url=url)
+ await ctx.send(embed=embed)
+
def setup(bot):
|
2ec1975da12cb9d95b1e1db7820f30850e075e4e
|
running.py
|
running.py
|
import tcxparser
tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
print(tcx.duration)
|
import tcxparser
from darksky import forecast
from configparser import ConfigParser
# Darksky weather API.
# Create config file manually
parser = ConfigParser()
parser.read('slowburn.config', encoding='utf-8')
darksky_key = parser.get('darksky', 'key')
tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
print(tcx.duration)
boston = forecast(darksky_key, 42.3601, -71.0589)
print(boston)
|
Add sample Darksky API call
|
Add sample Darksky API call
|
Python
|
mit
|
briansuhr/slowburn
|
---
+++
@@ -1,3 +1,15 @@
import tcxparser
+from darksky import forecast
+from configparser import ConfigParser
+
+# Darksky weather API.
+# Create config file manually
+parser = ConfigParser()
+parser.read('slowburn.config', encoding='utf-8')
+darksky_key = parser.get('darksky', 'key')
+
tcx = tcxparser.TCXParser('gps_logs/2017-06-15_Running.tcx')
+
print(tcx.duration)
+boston = forecast(darksky_key, 42.3601, -71.0589)
+print(boston)
|
4d1444e2f2a455e691342a82f0e116e210593411
|
s01/c01.py
|
s01/c01.py
|
"""Set 01 - Challenge 01."""
import base64
hex_string = ('49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f'
'69736f6e6f7573206d757368726f6f6d')
b64_string = b'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
def hex2b64(hex_string):
"""Convert a hex string into a base64 encoded byte string."""
hex_data = bytearray.fromhex(hex_string)
# Strip trailing newline
return base64.encodebytes(hex_data)[:-1]
assert hex2b64(hex_string) == b64_string
|
"""Set 01 - Challenge 01."""
import binascii
hex_string = ('49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f'
'69736f6e6f7573206d757368726f6f6d')
b64_string = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
def hex2b64(hex_string):
"""Convert a hex string into a base64 encoded string."""
return binascii.b2a_base64(binascii.a2b_hex(hex_string)).strip()
assert hex2b64(hex_string) == b64_string
|
Revert "Updated function to work on bytes rather than binascii functions."
|
Revert "Updated function to work on bytes rather than binascii functions."
This reverts commit 25176b64aed599059e4b552fbd76c5f4bc28434e.
|
Python
|
mit
|
sornars/matasano-challenges-py
|
---
+++
@@ -1,14 +1,13 @@
"""Set 01 - Challenge 01."""
-import base64
+
+import binascii
hex_string = ('49276d206b696c6c696e6720796f757220627261696e206c696b65206120706f'
'69736f6e6f7573206d757368726f6f6d')
-b64_string = b'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
+b64_string = 'SSdtIGtpbGxpbmcgeW91ciBicmFpbiBsaWtlIGEgcG9pc29ub3VzIG11c2hyb29t'
def hex2b64(hex_string):
- """Convert a hex string into a base64 encoded byte string."""
- hex_data = bytearray.fromhex(hex_string)
- # Strip trailing newline
- return base64.encodebytes(hex_data)[:-1]
+ """Convert a hex string into a base64 encoded string."""
+ return binascii.b2a_base64(binascii.a2b_hex(hex_string)).strip()
assert hex2b64(hex_string) == b64_string
|
3d414a7bc4b5e6c6c2b1ec8f44b69bab537fd50d
|
wsgi_general.py
|
wsgi_general.py
|
import DQXUtils
def application(environ, start_response):
#For the root we do a relative redirect to index.html, hoping the app has one
if environ['PATH_INFO'] == '/':
start_response('301 Moved Permanently', [('Location', 'index.html'),])
return
DQXUtils.LogServer('404:' + environ['PATH_INFO'])
start_response('404 Not Found', [])
try:
with open('static/404.html') as page:
yield page.read()
except IOError:
yield '404 Page Not Found'
return
|
import DQXUtils
import DQXDbTools
def application(environ, start_response):
#For the root we do a relative redirect to index.html, hoping the app has one
if environ['PATH_INFO'] == '/':
start_response('301 Moved Permanently', [('Location', 'index.html'),])
return
with DQXDbTools.DBCursor() as cur:
cur.execute('select id from datasetindex')
datasets = [d[0] for d in cur.fetchall()]
#Redirect to specific dataset
path = environ['PATH_INFO'].split('/')
if len(path) >= 2 and path[-2] in datasets:
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-2]),])
return
if path[-1] in datasets:
start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-1]),])
return
#Everything else is 404
DQXUtils.LogServer('404:' + environ['PATH_INFO'])
start_response('404 Not Found', [])
try:
with open('static/404.html') as page:
yield page.read()
except IOError:
yield '404 Page Not Found'
return
|
Allow URLS to have dataset names in
|
Allow URLS to have dataset names in
|
Python
|
agpl-3.0
|
cggh/DQXServer
|
---
+++
@@ -1,5 +1,5 @@
import DQXUtils
-
+import DQXDbTools
def application(environ, start_response):
#For the root we do a relative redirect to index.html, hoping the app has one
@@ -7,6 +7,20 @@
start_response('301 Moved Permanently', [('Location', 'index.html'),])
return
+ with DQXDbTools.DBCursor() as cur:
+ cur.execute('select id from datasetindex')
+ datasets = [d[0] for d in cur.fetchall()]
+
+ #Redirect to specific dataset
+ path = environ['PATH_INFO'].split('/')
+ if len(path) >= 2 and path[-2] in datasets:
+ start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-2]),])
+ return
+ if path[-1] in datasets:
+ start_response('301 Moved Permanently', [('Location', '../index.html?dataset='+path[-1]),])
+ return
+
+ #Everything else is 404
DQXUtils.LogServer('404:' + environ['PATH_INFO'])
start_response('404 Not Found', [])
try:
|
acdcb3d01dea2af0dc94c22ee5f40304da8d462a
|
src/pycrunchbase/resource/investment.py
|
src/pycrunchbase/resource/investment.py
|
import six
from .node import Node
@six.python_2_unicode_compatible
class Investment(Node):
"""Represents a Investment (investor-investment) on CrunchBase"""
KNOWN_PROPERTIES = [
'type',
'uuid',
'money_invested',
'money_invested_currency_code',
'money_invested_usd',
]
KNOWN_RELATIONSHIPS = [
'funding_round',
'invested_in',
]
def __str__(self):
return u'{series} {invested_in}'.format(
series=self.funding_round.series,
invested_in=self.invested_in.name,
)
def __repr__(self):
return self.__str__()
|
import six
from .node import Node
@six.python_2_unicode_compatible
class Investment(Node):
"""Represents a Investment (investor-investment) on CrunchBase"""
KNOWN_PROPERTIES = [
'type',
'uuid',
'money_invested',
'money_invested_currency_code',
'money_invested_usd',
]
KNOWN_RELATIONSHIPS = [
'funding_round',
'invested_in',
'investors',
]
def __str__(self):
if self.money_invested:
return u'{invested}'.format(
self.money_invested
)
if hasattr(self, 'investors'):
return u'{investors}'.format(self.investors)
if self.type:
return u'{type}'.format(self.type)
return u'Investment'
def __repr__(self):
return self.__str__()
|
Add new relationship to Investment resource
|
Add new relationship to Investment resource
|
Python
|
mit
|
ngzhian/pycrunchbase,SidSachdev/pycrunchbase,alabid/pycrunchbase
|
---
+++
@@ -18,13 +18,22 @@
KNOWN_RELATIONSHIPS = [
'funding_round',
'invested_in',
+ 'investors',
]
def __str__(self):
- return u'{series} {invested_in}'.format(
- series=self.funding_round.series,
- invested_in=self.invested_in.name,
- )
+ if self.money_invested:
+ return u'{invested}'.format(
+ self.money_invested
+ )
+
+ if hasattr(self, 'investors'):
+ return u'{investors}'.format(self.investors)
+
+ if self.type:
+ return u'{type}'.format(self.type)
+
+ return u'Investment'
def __repr__(self):
return self.__str__()
|
ef495fe29566f575dcb18d5edf0e0301af095aee
|
survey/tests/views/test_confirm_view.py
|
survey/tests/views/test_confirm_view.py
|
# -*- coding: utf-8 -*-
from django.urls.base import reverse
from survey.models import Response, Survey
from survey.tests.base_test import BaseTest
class TestConfirmView(BaseTest):
def get_first_response(self, survey_name):
survey = Survey.objects.get(name=survey_name)
responses = Response.objects.filter(survey=survey)
response = responses.all()[0]
url = reverse("survey-confirmation", args=(response.interview_uuid,))
return self.client.get(url)
def test_editable_survey(self):
response = self.get_first_response("Unicode问卷")
self.assertEquals(response.status_code, 200)
self.assertContains(response, "come back and change them")
def test_uneditable_survey(self):
response = self.get_first_response("Test survëy")
self.assertEquals(response.status_code, 200)
self.assertNotContains(response, "come back and change them")
|
# -*- coding: utf-8 -*-
from django.urls.base import reverse
from survey.models import Response, Survey
from survey.tests.base_test import BaseTest
class TestConfirmView(BaseTest):
def get_first_response(self, survey_name):
survey = Survey.objects.get(name=survey_name)
responses = Response.objects.filter(survey=survey)
response = responses.all()[0]
url = reverse("survey-confirmation", args=(response.interview_uuid,))
return self.client.get(url)
def test_editable_survey(self):
response = self.get_first_response("Unicode问卷")
self.assertEqual(response.status_code, 200)
self.assertContains(response, "come back and change them")
def test_uneditable_survey(self):
response = self.get_first_response("Test survëy")
self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "come back and change them")
|
Fix W1505: Using deprecated method assertEquals()
|
Fix W1505: Using deprecated method assertEquals()
|
Python
|
agpl-3.0
|
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
|
---
+++
@@ -16,10 +16,10 @@
def test_editable_survey(self):
response = self.get_first_response("Unicode问卷")
- self.assertEquals(response.status_code, 200)
+ self.assertEqual(response.status_code, 200)
self.assertContains(response, "come back and change them")
def test_uneditable_survey(self):
response = self.get_first_response("Test survëy")
- self.assertEquals(response.status_code, 200)
+ self.assertEqual(response.status_code, 200)
self.assertNotContains(response, "come back and change them")
|
ed0551007c2f7bac2639aad8ced0b084d26efcb4
|
src/utils.py
|
src/utils.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
WithABCMeta = ABCMeta('WithABCMeta', (object,), {})
class Disposable(WithABCMeta):
'''
Exposes method to release resources held by the class.
'''
_disposed = False
def dispose(self):
'''
Disposes of resources that are owned by the object. This method is idempotent.
'''
if not self._disposed:
self._dispose()
self._disposed = True
@abstractmethod
def _dispose(self):
'''
Performs actual disposing, needs to be overridden by subclasses.
'''
def __del__(self):
self.dispose()
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
from abc import ABCMeta, abstractmethod
WithABCMeta = ABCMeta(str('WithABCMeta'), (object,), {})
class Disposable(WithABCMeta):
'''
Exposes method to release resources held by the class.
'''
_disposed = False
def dispose(self):
'''
Disposes of resources that are owned by the object. This method is idempotent.
'''
if not self._disposed:
self._dispose()
self._disposed = True
@abstractmethod
def _dispose(self):
'''
Performs actual disposing, needs to be overridden by subclasses.
'''
def __del__(self):
self.dispose()
|
Fix minor bug for Python 2.x
|
Fix minor bug for Python 2.x
|
Python
|
mit
|
jstasiak/python-cg,jstasiak/python-cg
|
---
+++
@@ -3,7 +3,7 @@
from abc import ABCMeta, abstractmethod
-WithABCMeta = ABCMeta('WithABCMeta', (object,), {})
+WithABCMeta = ABCMeta(str('WithABCMeta'), (object,), {})
class Disposable(WithABCMeta):
'''
|
1072e6225a49f409b2b20b000ccdc6f70f0c45e8
|
spotify.py
|
spotify.py
|
import sys
import random
from pytz import timezone
from datetime import datetime
import pytz
from libs import post_text
import spotipy
import os
from spotipy.oauth2 import SpotifyClientCredentials
'''
sadness_texts = [line.strip() for line in open('list of saddness.txt')]
central = timezone('US/Central')
now = datetime.now(tz=pytz.utc)
'''
username = os.environ.get("USERNAME")
client_id = os.environ.get("SPOTIPY_CLIENT_ID")
client_secret = os.environ.get("SPOTIPY_CLIENT_SECRET")
client_credentials_manager = SpotifyClientCredentials()
sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
results = sp.user_playlist(username, sys.argv[2], 'tracks,next')
tracks = results['tracks']
all_tracks = tracks['items']
while tracks['next']:
tracks = sp.next(tracks)
all_tracks += tracks['items']
random_track = random.choice(all_tracks)
if(datetime.datetime.today().weekday() == 0):
post_text(u'\U0001F3B5\U0001F4C5: ' + random_track['track']['name'] + ' - ' + random_track['track']['artists'][0]['name'] + ' ' + random_track['track']['external_urls']['spotify'], sys.argv[1])
|
import sys
import random
from pytz import timezone
from datetime import datetime
import pytz
from libs import post_text
import spotipy
import os
from spotipy.oauth2 import SpotifyClientCredentials
'''
sadness_texts = [line.strip() for line in open('list of saddness.txt')]
central = timezone('US/Central')
now = datetime.now(tz=pytz.utc)
'''
username = os.environ.get("USERNAME")
client_id = os.environ.get("SPOTIPY_CLIENT_ID")
client_secret = os.environ.get("SPOTIPY_CLIENT_SECRET")
client_credentials_manager = SpotifyClientCredentials()
sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
results = sp.user_playlist(username, sys.argv[2], 'tracks,next')
tracks = results['tracks']
all_tracks = tracks['items']
while tracks['next']:
tracks = sp.next(tracks)
all_tracks += tracks['items']
random_track = random.choice(all_tracks)
if(datetime.datetime.today().weekday() == 0):
post_text(u'\U0001F3B5\U0001F4C5: ' + random_track['track']['name'] + ' - ' + random_track['track']['artists'][0]['name'] + ' ' + random_track['track']['external_urls']['spotify'], sys.argv[1])
else:
post_text(datetime.datetime.today().weekday(), sys.argv[1])
|
Debug song of the week
|
Debug song of the week
|
Python
|
mit
|
Boijangle/GroupMe-Message-Bot
|
---
+++
@@ -30,3 +30,5 @@
random_track = random.choice(all_tracks)
if(datetime.datetime.today().weekday() == 0):
post_text(u'\U0001F3B5\U0001F4C5: ' + random_track['track']['name'] + ' - ' + random_track['track']['artists'][0]['name'] + ' ' + random_track['track']['external_urls']['spotify'], sys.argv[1])
+else:
+ post_text(datetime.datetime.today().weekday(), sys.argv[1])
|
498552599753f07d179025b5de1e8207ec2b94cd
|
test/unit/util/test_multipart_stream.py
|
test/unit/util/test_multipart_stream.py
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
import pytest
from boxsdk.util.multipart_stream import MultipartStream
@pytest.fixture(params=({}, {'data_1': b'data_1_value', 'data_2': b'data_2_value'}))
def multipart_stream_data(request):
return request.param
@pytest.fixture(params=({}, {'file_1': b'file_1_value', 'file_2': b'file_2_value'}))
def multipart_stream_files(request):
return request.param
def test_multipart_stream_orders_data_before_files(multipart_stream_data, multipart_stream_files):
if not multipart_stream_data and not multipart_stream_files:
pytest.xfail('Encoder does not support empty fields.')
stream = MultipartStream(multipart_stream_data, multipart_stream_files)
encoded_stream = stream.to_string()
data_indices = [encoded_stream.find(value) for value in multipart_stream_data.values()]
file_indices = [encoded_stream.find(value) for value in multipart_stream_files.values()]
assert -1 not in data_indices
assert -1 not in file_indices
assert all((all((data_index < f for f in file_indices)) for data_index in data_indices))
|
# coding: utf-8
from __future__ import unicode_literals, absolute_import
import pytest
from boxsdk.util.multipart_stream import MultipartStream
@pytest.fixture(params=({}, {'data_1': b'data_1_value', 'data_2': b'data_2_value'}))
def multipart_stream_data(request):
return request.param
@pytest.fixture(params=({}, {'file_1': b'file_1_value', 'file_2': b'file_2_value'}))
def multipart_stream_files(request):
return request.param
def test_multipart_stream_orders_data_before_files(multipart_stream_data, multipart_stream_files):
# pylint:disable=redefined-outer-name
if not multipart_stream_data and not multipart_stream_files:
pytest.xfail('Encoder does not support empty fields.')
stream = MultipartStream(multipart_stream_data, multipart_stream_files)
encoded_stream = stream.to_string()
data_indices = [encoded_stream.find(value) for value in multipart_stream_data.values()]
file_indices = [encoded_stream.find(value) for value in multipart_stream_files.values()]
assert -1 not in data_indices
assert -1 not in file_indices
assert all((all((data_index < f for f in file_indices)) for data_index in data_indices))
|
Disable redefined outer name pylint warning.
|
Disable redefined outer name pylint warning.
|
Python
|
apache-2.0
|
Frencil/box-python-sdk,Frencil/box-python-sdk,box/box-python-sdk
|
---
+++
@@ -18,6 +18,7 @@
def test_multipart_stream_orders_data_before_files(multipart_stream_data, multipart_stream_files):
+ # pylint:disable=redefined-outer-name
if not multipart_stream_data and not multipart_stream_files:
pytest.xfail('Encoder does not support empty fields.')
stream = MultipartStream(multipart_stream_data, multipart_stream_files)
|
79996420e775994b53d88f5b7c9ad21106a77831
|
examples/tests/test_examples.py
|
examples/tests/test_examples.py
|
import pytest
from examples.gbest_pso import main as gbest
from examples.lbest_pso import main as lbest
from examples.gc_pso import main as gc
from examples.pso_optimizer import main as pso_optimizer
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gbest_pso(dimension, iterations):
gbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_lbest_pso(dimension, iterations):
lbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
gc(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
pso_optimizer(dimension, iterations)
|
# Copyright 2016 Andrich van Wyk
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Tests ensuring examples execute
"""
import pytest
from examples.gbest_pso import main as gbest
from examples.gc_pso import main as gc
from examples.lbest_pso import main as lbest
from examples.pso_optimizer import main as pso_optimizer
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gbest_pso(dimension, iterations):
gbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_lbest_pso(dimension, iterations):
lbest(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
gc(dimension, iterations)
@pytest.mark.parametrize("dimension", [
1,
30
])
@pytest.mark.parametrize("iterations", [
3
])
def test_gc_pso(dimension, iterations):
pso_optimizer(dimension, iterations)
|
Add license header and file documentation
|
Add license header and file documentation
|
Python
|
apache-2.0
|
avanwyk/cipy
|
---
+++
@@ -1,8 +1,24 @@
+# Copyright 2016 Andrich van Wyk
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+""" Tests ensuring examples execute
+"""
import pytest
from examples.gbest_pso import main as gbest
+from examples.gc_pso import main as gc
from examples.lbest_pso import main as lbest
-from examples.gc_pso import main as gc
from examples.pso_optimizer import main as pso_optimizer
|
8e61726b178c5175347008b9b77032fd223b6114
|
elections_r_us/security.py
|
elections_r_us/security.py
|
from models import User
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
"""Add a new user to the database.
session is expected to be a dbsession, username and password are
expected to be (unencrypted) unicode strings."""
session.add(User(
username=username,
password=pwd_context.encrypt(password)
))
def check_login(session, username, password):
"""Return whether username and password match in the database.
If username is not present, returns False."""
try:
query = session.query(User).filter(User.username == username).first()
return pwd_context.verify(password, query.password)
except AttributeError:
return False
|
from .models import User
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
"""Add a new user to the database.
session is expected to be a dbsession, username and password are
expected to be (unencrypted) unicode strings."""
session.add(User(
username=username,
password=pwd_context.encrypt(password)
))
def check_login(session, username, password):
"""Return whether username and password match in the database.
If username is not present, returns False."""
query = session.query(User).filter(User.username == username).first()
try:
return pwd_context.verify(password, query.password)
except AttributeError:
return False
|
Move query assignment out of try block
|
Move query assignment out of try block
make import relative
|
Python
|
mit
|
Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us
|
---
+++
@@ -1,4 +1,4 @@
-from models import User
+from .models import User
from passlib.apps import custom_app_context as pwd_context
def create_user(session, username, password):
@@ -16,8 +16,8 @@
"""Return whether username and password match in the database.
If username is not present, returns False."""
+ query = session.query(User).filter(User.username == username).first()
try:
- query = session.query(User).filter(User.username == username).first()
return pwd_context.verify(password, query.password)
except AttributeError:
return False
|
7bde0ba157431311ae138acd8a2018f85d8af91d
|
test_data.py
|
test_data.py
|
def f1(a, # S100
b): # S101
pass
def f2(
a,
b # S101
):
pass
def f3(
a,
b,
):
pass
# trailing comma after *args or **kwargs is a syntax error therefore
# we don't want to enforce it such situations
def f4(
a,
*args
):
pass
def f5(
b,
**kwargs
):
pass
f3(1, # S100
2) # S101
f3(
1,
2) # S101
f3(
1,
2 # S101
)
f3(
1,
2,
)
kwargs = {}
f5('-o', # S100
some_keyword_argument='./') # S101
f5(
b='something',
)
(
''.
format())
|
# Fixture for the flake8-strict trailing-comma checks.  Inline "# S100" and
# "# S101" markers flag lines the plugin is expected to report; the unmarked
# definitions and calls are the compliant forms.
# NOTE(review): deliberately non-idiomatic code -- do not "fix" its style.
def f1(a, # S100
       b): # S101
    pass
def f2(
    a,
    b # S101
):
    pass
def f3(
    a,
    b,
):
    pass
# trailing comma after *args or **kwargs is a syntax error therefore
# we don't want to enforce it such situations
def f4(
    a,
    *args
):
    pass
def f5(
    b,
    **kwargs
):
    pass
# keyword-only parameter after a bare *: carries no marker, so the plugin
# must not flag the missing trailing comma here
def f6(
    *,
    d
):
    pass
f3(1, # S100
   2) # S101
f3(
    1,
    2) # S101
f3(
    1,
    2 # S101
)
f3(
    1,
    2,
)
kwargs = {}
f5('-o', # S100
   some_keyword_argument='./') # S101
f5(
    b='something',
)
(
    ''.
    format())
|
Add a test for functions with keyword only arguments
|
Add a test for functions with keyword only arguments
This adds a test to ensure that no error is raised if a trailing comma
is missing from a function definition that has keyword only arguments.
Reviewed-by: Jakub Stasiak <1d3764b91b902f6b45836e2498da81fe35caf6d6@stasiak.at>
|
Python
|
mit
|
smarkets/flake8-strict
|
---
+++
@@ -34,6 +34,12 @@
):
pass
+def f6(
+ *,
+ d
+):
+ pass
+
f3(1, # S100
2) # S101
|
176c98dd7fec26980591a9ba3bb71bee1eeab8a7
|
backend/fureon/components/mixins.py
|
backend/fureon/components/mixins.py
|
import threading
class SingletonMixin(object):
    """Mixin that adds a lazily created, shared instance to a class.

    ``instance()`` always hands back the same object, using
    double-checked locking so concurrent first calls construct it once.
    """

    # Lock guarding first construction.  It lives on the mixin itself,
    # so every class using the mixin shares this single lock.
    __singleton_lock = threading.Lock()
    # Cached object; stays None until the first instance() call.
    __singleton_instance = None

    @classmethod
    def instance(cls, *args, **kwargs):
        """Return the cached instance, building it on first use."""
        if cls.__singleton_instance:
            return cls.__singleton_instance
        with cls.__singleton_lock:
            # Re-check while holding the lock: another thread may have
            # constructed the instance before we acquired it.
            if not cls.__singleton_instance:
                cls.__singleton_instance = cls(*args, **kwargs)
        return cls.__singleton_instance
|
import threading
class SingletonMixin(object):
    """Mixin giving each class a lazily created singleton instance.

    The singleton lock itself is created lazily, one per class, so that
    one singleton may safely construct another singleton while it is
    being initialised -- a single shared lock would deadlock there.
    """

    # Created eagerly at import time; serialises *creation* of the
    # per-class locks below.  The previous lazy "if not lock:
    # lock = Lock()" was itself a race: two threads could each install
    # a lock and both enter the critical section.
    __creation_lock = threading.Lock()

    __singleton_lock = None       # per-class lock, created on first use
    __singleton_instance = None   # cached instance, None until first use

    @classmethod
    def instance(cls, *args, **kwargs):
        """Return this class's singleton, creating it on first call.

        *args/**kwargs are forwarded to the constructor only on the
        call that actually creates the instance.
        """
        if cls.__singleton_lock is None:
            with SingletonMixin.__creation_lock:
                # Re-check under the guard: another thread may have
                # already installed the per-class lock.
                if cls.__singleton_lock is None:
                    cls.__singleton_lock = threading.Lock()

        if not cls.__singleton_instance:
            with cls.__singleton_lock:
                # Double-checked locking: re-test now that we hold it.
                if not cls.__singleton_instance:
                    cls.__singleton_instance = cls(*args, **kwargs)
        return cls.__singleton_instance
|
Change singletons to instantiate locks per-class
|
Change singletons to instantiate locks per-class
Before it was creating one for any singleton object, resulting in
Bad Things (infinite lock) happening when one singleton object
instantiates another singleton object in its instantiation. (I hope
that made sense...)
|
Python
|
apache-2.0
|
ATRAN2/fureon
|
---
+++
@@ -2,11 +2,14 @@
class SingletonMixin(object):
- __singleton_lock = threading.Lock()
+ __singleton_lock = None
__singleton_instance = None
@classmethod
def instance(cls, *args, **kwargs):
+ if not cls.__singleton_lock:
+ cls.__singleton_lock = threading.Lock()
+
if not cls.__singleton_instance:
with cls.__singleton_lock:
if not cls.__singleton_instance:
|
b141956f915a2b3e87e1c85949f6bddccf62c57c
|
okpub/client.py
|
okpub/client.py
|
# Copyright (c) 2015, Stavros Sachtouris
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from kamaki.clients import Client, utils, astakos, cyclades
class KeyAPI(Client):
    """A kamaki-based API client for the public Key API."""

    @staticmethod
    def get_endpoint_url(auth_url):
        """Derive the Key API URL from the authentication endpoint.

        Asks astakos for the compute endpoint, then rewrites its base
        to point at the userdata/keys service.
        """
        astakos_client = astakos.AstakosClient(auth_url, '')
        service_type = cyclades.CycladesComputeClient.service_type
        compute_url = astakos_client.get_endpoint_url(service_type)
        base, _ = compute_url.split('/compute')
        return utils.path4url(base, 'userdata', 'keys')
# utils.https.patch_ignore_ssl()
# print KeyAPI.get_endpoint_url('https://accounts.okeanos.grnet.gr/identity/v2.0')
|
Create KeyAPI class + method to construct endpoint
|
Create KeyAPI class + method to construct endpoint
|
Python
|
bsd-2-clause
|
saxtouri/okpub
|
---
+++
@@ -0,0 +1,43 @@
+# Copyright (c) 2015, Stavros Sachtouris
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright notice, this
+# list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+from kamaki.clients import Client, utils, astakos, cyclades
+
+
+class KeyAPI(Client):
+ """A kamaki-based API client for public Key API"""
+
+ @staticmethod
+ def get_endpoint_url(auth_url):
+ """Use the authentication URL and astakos to construct Key API url"""
+ auth = astakos.AstakosClient(auth_url, '')
+ compute_endpoint = auth.get_endpoint_url(
+ cyclades.CycladesComputeClient.service_type)
+ endpoint, _ = compute_endpoint.split('/compute')
+ return utils.path4url(endpoint, 'userdata', 'keys')
+
+
+
+# utils.https.patch_ignore_ssl()
+# print KeyAPI.get_endpoint_url('https://accounts.okeanos.grnet.gr/identity/v2.0')
|
|
77f820fe1286a5d39f2704c3821251bcbe20a2ba
|
indra/tests/test_rlimsp.py
|
indra/tests/test_rlimsp.py
|
from indra.sources import rlimsp
def test_simple_usage():
    # Network test: runs PMC3717945 through the RLIMS-P web service and
    # checks the grounded statement count.  NOTE(review): the expected
    # count reflects the remote service's output and may drift over time.
    rp = rlimsp.process_pmc('PMC3717945')
    stmts = rp.statements
    assert len(stmts) == 6, len(stmts)
def test_ungrounded_usage():
    # Same fetch without grounding; ungrounded extraction yields more
    # statements (33, per the service at the time of writing).
    rp = rlimsp.process_pmc('PMC3717945', with_grounding=False)
    assert len(rp.statements) == 33, len(rp.statements)
|
from indra.sources import rlimsp
def test_simple_usage():
    """Process PMC3717945 and sanity-check statements and their evidence."""
    processor = rlimsp.process_pmc('PMC3717945')
    statements = processor.statements
    assert len(statements) == 6, len(statements)
    # Every statement should carry exactly one annotated evidence object.
    for statement in statements:
        assert len(statement.evidence) == 1, "Wrong amount of evidence."
        ev = statement.evidence[0]
        assert ev.annotations, "Missing annotations."
        assert 'agents' in ev.annotations.keys()
        assert 'trigger' in ev.annotations.keys()
def test_ungrounded_usage():
    """Without grounding, more (ungrounded) statements are extracted."""
    processor = rlimsp.process_pmc('PMC3717945', with_grounding=False)
    count = len(processor.statements)
    assert count == 33, count
|
Make basic test more particular.
|
Make basic test more particular.
|
Python
|
bsd-2-clause
|
sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,bgyori/indra,johnbachman/belpy,sorgerlab/indra,bgyori/indra,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,johnbachman/indra
|
---
+++
@@ -5,6 +5,12 @@
rp = rlimsp.process_pmc('PMC3717945')
stmts = rp.statements
assert len(stmts) == 6, len(stmts)
+ for s in stmts:
+ assert len(s.evidence) == 1, "Wrong amount of evidence."
+ ev = s.evidence[0]
+ assert ev.annotations, "Missing annotations."
+ assert 'agents' in ev.annotations.keys()
+ assert 'trigger' in ev.annotations.keys()
def test_ungrounded_usage():
|
8d93b696700459db7772e1a3f3ae3601af5417d3
|
rust_sorting.py
|
rust_sorting.py
|
#!/usr/bin/env python3
import ctypes
import os
import glob
import numpy as np
# Load the Rust library when loading this module
target = "debug"
# target = "release"
libpath = os.path.join("target", target, "libsorting.*")
libfile = glob.glob(libpath)[0]
rustlib = ctypes.CDLL(libfile)
def quicksort(array):
    # In-place sort through the Rust FFI.  NOTE(review): this always calls
    # the int8 entry point regardless of array.dtype, so only np.int8
    # arrays are handled correctly here.
    rustlib.ffi_quicksort_i8(ctypes.c_void_p(array.ctypes.data), len(array))
|
#!/usr/bin/env python3
import ctypes
import os
import glob
import numpy as np
# Load the Rust library when loading this module
target = "debug"
# target = "release"
libpath = os.path.join("target", target, "libsorting.*")
libfile = glob.glob(libpath)[0]
rustlib = ctypes.CDLL(libfile)
def quicksort(array):
    """Sort a one-dimensional NumPy array in place via the Rust library.

    Dispatches to the FFI entry point that matches ``array.dtype``.

    Args:
        array: contiguous numpy array of a supported signed/unsigned
            integer or floating-point dtype.

    Raises:
        TypeError: if the dtype has no corresponding Rust function
            (the previous if/elif chain silently did nothing).
    """
    # Suffix of the Rust symbol ("ffi_quicksort_<suffix>") per dtype.
    suffix_by_dtype = {
        np.dtype(np.int8): 'i8',
        np.dtype(np.int16): 'i16',
        np.dtype(np.int32): 'i32',
        np.dtype(np.int64): 'i64',
        np.dtype(np.uint8): 'u8',
        np.dtype(np.uint16): 'u16',
        np.dtype(np.uint32): 'u32',
        np.dtype(np.uint64): 'u64',
        np.dtype(np.float32): 'f32',
        np.dtype(np.float64): 'f64',
    }
    try:
        suffix = suffix_by_dtype[array.dtype]
    except KeyError:
        raise TypeError('quicksort: unsupported dtype %s' % array.dtype)

    ffi_function = getattr(rustlib, 'ffi_quicksort_' + suffix)
    ffi_function(ctypes.c_void_p(array.ctypes.data), len(array))
|
Call the proper quicksort version depending on type.
|
Call the proper quicksort version depending on type.
|
Python
|
bsd-3-clause
|
nbigaouette/rust-sorting,nbigaouette/rust-sorting,nbigaouette/rust-sorting
|
---
+++
@@ -17,4 +17,33 @@
rustlib = ctypes.CDLL(libfile)
def quicksort(array):
- rustlib.ffi_quicksort_i8(ctypes.c_void_p(array.ctypes.data), len(array))
+
+ # rustlib.ffi_quicksort(
+ # ctypes.c_void_p(array.ctypes.data), len(array), b"i32"
+ # )
+
+ ptr = ctypes.c_void_p(array.ctypes.data)
+ n = len(array)
+
+ if array.dtype == np.int8:
+ rustlib.ffi_quicksort_i8(ptr, n)
+ elif array.dtype == np.int16:
+ rustlib.ffi_quicksort_i16(ptr, n)
+ elif array.dtype == np.int32:
+ rustlib.ffi_quicksort_i32(ptr, n)
+ elif array.dtype == np.int64:
+ rustlib.ffi_quicksort_i64(ptr, n)
+
+ elif array.dtype == np.uint8:
+ rustlib.ffi_quicksort_u8(ptr, n)
+ elif array.dtype == np.uint16:
+ rustlib.ffi_quicksort_u16(ptr, n)
+ elif array.dtype == np.uint32:
+ rustlib.ffi_quicksort_u32(ptr, n)
+ elif array.dtype == np.uint64:
+ rustlib.ffi_quicksort_u64(ptr, n)
+
+ elif array.dtype == np.float32:
+ rustlib.ffi_quicksort_f32(ptr, n)
+ elif array.dtype == np.float64:
+ rustlib.ffi_quicksort_f64(ptr, n)
|
adc8e9fd9a6e0960038e51e03bc3c211de283a39
|
python/setup.py
|
python/setup.py
|
from setuptools import find_packages, setup
setup(
name='tchannel',
version='0.1.0+dev0',
author='Aiden Scandella',
author_email='dev@uber.com',
description='Network multiplexing and framing protocol for RPC',
license='MIT',
url='https://github.com/uber/tchannel',
packages=find_packages(),
install_requires=['contextlib2', 'enum34'],
entry_points={
'console_scripts': [
'tcurl.py = tchannel.tcurl:main'
]
},
)
|
from setuptools import find_packages, setup
setup(
name='tchannel',
version='0.1.0+dev0',
author='Aiden Scandella',
author_email='dev@uber.com',
description='Network multiplexing and framing protocol for RPC',
license='MIT',
url='https://github.com/uber/tchannel',
packages=find_packages(),
install_requires=['contextlib2', 'enum34', 'futures'],
entry_points={
'console_scripts': [
'tcurl.py = tchannel.tcurl:main'
]
},
)
|
Add futures as a dependency.
|
Add futures as a dependency.
|
Python
|
mit
|
vanloswang/tchannel,sasa233/tchannel,benfleis/tchannel,hustxiaoc/tchannel,savaki/tchannel,RyanTech/tchannel,Zirpon/tchannel,RyanTech/tchannel,i/tchannel,chenwenbin928/tchannel,i/tchannel,vanloswang/tchannel,sasa233/tchannel,bunnyblue/tchannel,Zirpon/tchannel,i/tchannel,benfleis/tchannel,benfleis/tchannel,chenwenbin928/tchannel,hustxiaoc/tchannel,Zirpon/tchannel,vanloswang/tchannel,bunnyblue/tchannel,savaki/tchannel,chenwenbin928/tchannel,RyanTech/tchannel,savaki/tchannel,sasa233/tchannel,bunnyblue/tchannel,hustxiaoc/tchannel
|
---
+++
@@ -10,7 +10,7 @@
license='MIT',
url='https://github.com/uber/tchannel',
packages=find_packages(),
- install_requires=['contextlib2', 'enum34'],
+ install_requires=['contextlib2', 'enum34', 'futures'],
entry_points={
'console_scripts': [
'tcurl.py = tchannel.tcurl:main'
|
f6cd6b3377769af524377979438b9e662bb9175a
|
tangled/site/model/base.py
|
tangled/site/model/base.py
|
import datetime
from sqlalchemy.schema import Column
from sqlalchemy.types import DateTime, Integer
from sqlalchemy.ext.declarative import declarative_base, declared_attr
Base = declarative_base()
class BaseMixin:
id = Column(Integer, primary_key=True)
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
class TimestampMixin:
created_at = Column(
DateTime, nullable=False, default=datetime.datetime.now)
updated_at = Column(DateTime)
|
from datetime import datetime
from sqlalchemy.schema import Column
from sqlalchemy.types import DateTime, Integer
from sqlalchemy.ext.declarative import declarative_base, declared_attr
Base = declarative_base()
class BaseMixin:
id = Column(Integer, primary_key=True)
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
class TimestampMixin:
created_at = Column(DateTime, nullable=False, default=datetime.now)
updated_at = Column(DateTime, onupdate=datetime.now)
|
Update updated time on update
|
Update updated time on update
I.e., added onupdate=datetime.now to TimestampMixin.updated_at so that it will
be automatically updated whenever a record is edited.
|
Python
|
mit
|
TangledWeb/tangled.site
|
---
+++
@@ -1,4 +1,4 @@
-import datetime
+from datetime import datetime
from sqlalchemy.schema import Column
from sqlalchemy.types import DateTime, Integer
@@ -20,6 +20,5 @@
class TimestampMixin:
- created_at = Column(
- DateTime, nullable=False, default=datetime.datetime.now)
- updated_at = Column(DateTime)
+ created_at = Column(DateTime, nullable=False, default=datetime.now)
+ updated_at = Column(DateTime, onupdate=datetime.now)
|
38f3fb09857a4babbd893f546f39c60ce4865fb1
|
lib/main/tests/__init__.py
|
lib/main/tests/__init__.py
|
# (c) 2013, AnsibleWorks
#
# This file is part of Ansible Commander
#
# Ansible Commander is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible Commander is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible Commander. If not, see <http://www.gnu.org/licenses/>.
from lib.main.tests.organizations import OrganizationsTest
from lib.main.tests.users import UsersTest
from lib.main.tests.inventory import InventoryTest
from lib.main.tests.projects import ProjectsTest
from lib.main.tests.commands import AcomInventoryTest
from lib.main.tests.tasks import RunLaunchJobTest
|
# (c) 2013, AnsibleWorks
#
# This file is part of Ansible Commander
#
# Ansible Commander is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible Commander is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible Commander. If not, see <http://www.gnu.org/licenses/>.
from lib.main.tests.organizations import OrganizationsTest
from lib.main.tests.users import UsersTest
from lib.main.tests.inventory import InventoryTest
# FIXME: Uncomment the next line when projects.py is added to git.
# from lib.main.tests.projects import ProjectsTest
from lib.main.tests.commands import AcomInventoryTest
from lib.main.tests.tasks import RunLaunchJobTest
|
Fix import error for missing file.
|
Fix import error for missing file.
|
Python
|
apache-2.0
|
snahelou/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx
|
---
+++
@@ -19,6 +19,7 @@
from lib.main.tests.organizations import OrganizationsTest
from lib.main.tests.users import UsersTest
from lib.main.tests.inventory import InventoryTest
-from lib.main.tests.projects import ProjectsTest
+# FIXME: Uncomment the next line when projects.py is added to git.
+# from lib.main.tests.projects import ProjectsTest
from lib.main.tests.commands import AcomInventoryTest
from lib.main.tests.tasks import RunLaunchJobTest
|
4518263958a9985a7b9b2018264ee0c42479fd10
|
src/tempel/urls.py
|
src/tempel/urls.py
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^\+media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^admin/', include(admin.site.urls)),
(r'^(?P<id>\d+)$', 'tempel.views.view', {'mode': 'html'}),
(r'^(?P<id>\d+).html$', 'tempel.views.view', {'mode': 'html'}),
(r'^(?P<id>\d+).txt$', 'tempel.views.view', {'mode': 'txt'}),
(r'^e/(?P<id>\d+)/download/$', 'tempel.views.download'),
(r'^$', 'tempel.views.index'),
)
|
from django.conf.urls.defaults import *
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
(r'^\+media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^admin/', include(admin.site.urls)),
url(r'^(?P<id>\d+)$', 'tempel.views.view', {'mode': 'html'}, name='tempel_view'),
url(r'^(?P<id>\d+).html$', 'tempel.views.view', {'mode': 'html'}, name='tempel_html'),
url(r'^(?P<id>\d+).txt$', 'tempel.views.view', {'mode': 'txt'}, name='tempel_raw'),
url(r'^e/(?P<id>\d+)/download/$', 'tempel.views.download', name='tempel_download'),
url(r'^$', 'tempel.views.index', name='tempel_index'),
)
|
Add names to each url
|
Add names to each url
|
Python
|
agpl-3.0
|
fajran/tempel
|
---
+++
@@ -8,10 +8,10 @@
(r'^\+media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT}),
(r'^admin/', include(admin.site.urls)),
- (r'^(?P<id>\d+)$', 'tempel.views.view', {'mode': 'html'}),
- (r'^(?P<id>\d+).html$', 'tempel.views.view', {'mode': 'html'}),
- (r'^(?P<id>\d+).txt$', 'tempel.views.view', {'mode': 'txt'}),
- (r'^e/(?P<id>\d+)/download/$', 'tempel.views.download'),
- (r'^$', 'tempel.views.index'),
+ url(r'^(?P<id>\d+)$', 'tempel.views.view', {'mode': 'html'}, name='tempel_view'),
+ url(r'^(?P<id>\d+).html$', 'tempel.views.view', {'mode': 'html'}, name='tempel_html'),
+ url(r'^(?P<id>\d+).txt$', 'tempel.views.view', {'mode': 'txt'}, name='tempel_raw'),
+ url(r'^e/(?P<id>\d+)/download/$', 'tempel.views.download', name='tempel_download'),
+ url(r'^$', 'tempel.views.index', name='tempel_index'),
)
|
b3c2e4636b3f271eeba2e9a7c11f491ed7d77f71
|
attributes/community/main.py
|
attributes/community/main.py
|
import sys
from core import Tokenizer
from utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
t_sub = options.get('sub')
t_star = options.get('star')
t_forks = options.get('forks')
cursor.execute('''
SELECT
url
FROM
projects
WHERE
id = {0}
'''.format(project_id))
record = cursor.fetchone()
tokenizer = Tokenizer()
full_url = tokenizer.tokenize(record[0].rstrip())
json_response = url_to_json(full_url)
subscribers_count = json_response.get('subscribers_count', 0)
stargazers_count = json_response.get('stargazers_count', 0)
forks = json_response.get('forks', 0)
result = False
if (
(subscribers_count >= t_sub and stargazers_count >= t_star) or
(stargazers_count >= t_star and forks >= t_forks) or
(subscribers_count >= t_sub and forks >= t_forks)
):
result = True
return (
result,
{
'sub': subscribers_count,
'star': stargazers_count,
'forks': forks
}
)
if __name__ == '__main__':
print('Attribute plugins are not meant to be executed directly.')
sys.exit(1)
|
import sys
from lib.core import Tokenizer
from lib.utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
    # Decide whether a project shows enough community interest: at least
    # two of the three thresholds (subscribers, stargazers, forks) must
    # be met.  Returns (bool, dict-of-raw-counts).
    t_sub = options.get('sub')
    t_star = options.get('star')
    t_forks = options.get('forks')

    # NOTE(review): the query interpolates project_id via str.format.
    # That is acceptable only if project_id is always a trusted integer;
    # a parameterized query would be safer -- confirm with callers.
    cursor.execute('''
        SELECT
            url
        FROM
            projects
        WHERE
            id = {0}
        '''.format(project_id))
    record = cursor.fetchone()

    # Expand the stored repository URL into a GitHub API call and fetch
    # the project's metadata.
    tokenizer = Tokenizer()
    full_url = tokenizer.tokenize(record[0].rstrip())
    json_response = url_to_json(full_url)

    # Missing fields default to 0 so absent metrics cannot pass a check.
    subscribers_count = json_response.get('subscribers_count', 0)
    stargazers_count = json_response.get('stargazers_count', 0)
    forks = json_response.get('forks', 0)

    result = False
    # Any two of the three thresholds being met marks the project as
    # community-backed.
    if (
        (subscribers_count >= t_sub and stargazers_count >= t_star) or
        (stargazers_count >= t_star and forks >= t_forks) or
        (subscribers_count >= t_sub and forks >= t_forks)
    ):
        result = True

    return (
        result,
        {
            'sub': subscribers_count,
            'star': stargazers_count,
            'forks': forks
        }
    )
if __name__ == '__main__':
print('Attribute plugins are not meant to be executed directly.')
sys.exit(1)
|
Update community to use new lib namespace
|
Update community to use new lib namespace
|
Python
|
apache-2.0
|
RepoReapers/reaper,RepoReapers/reaper,RepoReapers/reaper,RepoReapers/reaper
|
---
+++
@@ -1,7 +1,7 @@
import sys
-from core import Tokenizer
-from utilities import url_to_json
+from lib.core import Tokenizer
+from lib.utilities import url_to_json
def run(project_id, repo_path, cursor, **options):
|
dc2ed8e733d0497e1812ee31b61279083ef1861f
|
backend/breach/tests/base.py
|
backend/breach/tests/base.py
|
from django.test import TestCase
from breach.models import SampleSet, Victim, Target, Round
class RuptureTestCase(TestCase):
def setUp(self):
target = Target.objects.create(
endpoint='https://di.uoa.gr/?breach=%s',
prefix='test',
alphabet='0123456789'
)
self.victim = Victim.objects.create(
target=target,
sourceip='192.168.10.140',
snifferendpoint='http://localhost/'
)
round = Round.objects.create(
victim=self.victim,
amount=1,
knownsecret='testsecret',
knownalphabet='01'
)
self.samplesets = [
SampleSet.objects.create(
round=round,
candidatealphabet='0',
data='bigbigbigbigbigbig'
),
SampleSet.objects.create(
round=round,
candidatealphabet='1',
data='small'
)
]
# Balance checking
self.balance_victim = Victim.objects.create(
target=target,
sourceip='192.168.10.141',
snifferendpoint='http://localhost/'
)
balance_round = Round.objects.create(
victim=self.balance_victim,
amount=1,
knownsecret='testsecret',
knownalphabet='0123',
roundcardinality=3
)
|
from django.test import TestCase
from breach.models import SampleSet, Victim, Target, Round
class RuptureTestCase(TestCase):
    """Shared database fixtures for the rupture backend tests."""

    def setUp(self):
        # One target with a ten-symbol secret alphabet, shared by both
        # victims below.
        target = Target.objects.create(
            endpoint='https://di.uoa.gr/?breach=%s',
            prefix='test',
            alphabet='0123456789'
        )
        self.victim = Victim.objects.create(
            target=target,
            sourceip='192.168.10.140',
            snifferendpoint='http://localhost/'
        )
        # Round over a binary candidate alphabet; the two samplesets give
        # clearly different payload lengths ('bigbig...' vs 'small').
        # NOTE(review): 'round' shadows the builtin of the same name.
        round = Round.objects.create(
            victim=self.victim,
            amount=1,
            knownsecret='testsecret',
            knownalphabet='01'
        )
        self.samplesets = [
            SampleSet.objects.create(
                round=round,
                candidatealphabet='0',
                data='bigbigbigbigbigbig'
            ),
            SampleSet.objects.create(
                round=round,
                candidatealphabet='1',
                data='small'
            )
        ]

        # Balance checking
        # Second victim exercises rounds whose candidate alphabets have
        # unequal sizes (roundcardinality=3: '0' vs '123').
        self.balance_victim = Victim.objects.create(
            target=target,
            sourceip='192.168.10.141',
            snifferendpoint='http://localhost/'
        )
        balance_round = Round.objects.create(
            victim=self.balance_victim,
            amount=1,
            knownsecret='testsecret',
            knownalphabet='0123',
            roundcardinality=3
        )
        self.balance_samplesets = [
            SampleSet.objects.create(
                round=balance_round,
                candidatealphabet='0',
                data='bigbigbigbigbigbig'
            ),
            SampleSet.objects.create(
                round=balance_round,
                candidatealphabet='123',
                data='small'
            )
        ]
|
Add balance checking test samplesets
|
Add balance checking test samplesets
|
Python
|
mit
|
dimriou/rupture,dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dionyziz/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimriou/rupture,esarafianou/rupture
|
---
+++
@@ -46,3 +46,15 @@
knownalphabet='0123',
roundcardinality=3
)
+ self.balance_samplesets = [
+ SampleSet.objects.create(
+ round=balance_round,
+ candidatealphabet='0',
+ data='bigbigbigbigbigbig'
+ ),
+ SampleSet.objects.create(
+ round=balance_round,
+ candidatealphabet='123',
+ data='small'
+ )
+ ]
|
e6ee77f88ab0d05b221b5470d2c649d3d242f505
|
hecate/core/__init__.py
|
hecate/core/__init__.py
|
from hecate.core.base import CellularAutomaton
from hecate.core.properties import (
IntegerProperty,
)
from hecate.core.topology.lattice import (
OrthogonalLattice,
)
from hecate.core.topology.neighborhood import (
MooreNeighborhood,
)
from hecate.core.topology.border import (
TorusBorder,
)
from hecate.core.experiment import Experiment
__all__ = [
'CellularAutomaton',
'IntegerProperty',
'OrthogonalLattice',
'MooreNeighborhood',
'TorusBorder',
'Experiment',
]
|
from hecate.core.base import CellularAutomaton
from hecate.core.properties import (
IntegerProperty,
)
from hecate.core.topology.lattice import (
OrthogonalLattice,
)
from hecate.core.topology.neighborhood import (
MooreNeighborhood,
)
from hecate.core.topology.border import (
TorusBorder, StaticBorder,
)
from hecate.core.experiment import Experiment
__all__ = [
'CellularAutomaton',
'IntegerProperty',
'OrthogonalLattice',
'MooreNeighborhood',
'TorusBorder',
'StaticBorder',
'Experiment',
]
|
Add StaticBorder to public API
|
Add StaticBorder to public API
|
Python
|
mit
|
a5kin/hecate,a5kin/hecate
|
---
+++
@@ -9,7 +9,7 @@
MooreNeighborhood,
)
from hecate.core.topology.border import (
- TorusBorder,
+ TorusBorder, StaticBorder,
)
from hecate.core.experiment import Experiment
@@ -19,5 +19,6 @@
'OrthogonalLattice',
'MooreNeighborhood',
'TorusBorder',
+ 'StaticBorder',
'Experiment',
]
|
f5d3fb307bb17bc6651fe32ea2f520e7b87d37ca
|
utility.py
|
utility.py
|
#!/usr/bin/env python
"""
Copyright 2016 Brian Quach
Licensed under MIT (https://github.com/brianquach/udacity-nano-fullstack-conference/blob/master/LICENSE) # noqa
Code Citation:
https://github.com/udacity/FSND-P4-Design-A-Game/blob/master/Skeleton%20Project%20Guess-a-Number/utils.py #noqa
"""
import endpoints
from google.appengine.ext import ndb
def get_by_urlsafe(urlsafe, model):
    """Return the ndb.Model entity that the urlsafe key points to.

    Checks that the type of entity returned is of the correct kind and
    raises if the key string is malformed or the entity is of the
    incorrect kind.

    Args:
        urlsafe: A urlsafe key string.
        model: The expected entity kind (an ndb.Model subclass).

    Returns:
        The entity that the urlsafe Key string points to, or None if no
        entity exists.

    Raises:
        endpoints.BadRequestException: if the key string is malformed.
        ValueError: if the entity is not an instance of `model`.
    """
    try:
        key = ndb.Key(urlsafe=urlsafe)
    except TypeError:
        raise endpoints.BadRequestException('Invalid Key')
    except Exception, e:
        # ProtocolBufferDecodeError is raised deep inside the datastore
        # library; match it by name to avoid importing a private module.
        if e.__class__.__name__ == 'ProtocolBufferDecodeError':
            raise endpoints.BadRequestException('Invalid Key')
        else:
            raise

    entity = key.get()
    if not entity:
        return None
    if not isinstance(entity, model):
        raise ValueError('Incorrect Kind')
    return entity
|
#!/usr/bin/env python
"""
Code Citation:
https://github.com/udacity/FSND-P4-Design-A-Game/blob/master/Skeleton%20Project%20Guess-a-Number/utils.py #noqa
"""
import endpoints
from google.appengine.ext import ndb
def get_by_urlsafe(urlsafe, model):
    """Return the ndb.Model entity that the urlsafe key points to.

    Checks that the type of entity returned is of the correct kind and
    raises if the key string is malformed or the entity is of the
    incorrect kind.

    Args:
        urlsafe: A urlsafe key string.
        model: The expected entity kind (an ndb.Model subclass).

    Returns:
        The entity that the urlsafe Key string points to, or None if no
        entity exists.

    Raises:
        endpoints.BadRequestException: if the key string is malformed.
        ValueError: if the entity is not an instance of `model`.
    """
    try:
        key = ndb.Key(urlsafe=urlsafe)
    except TypeError:
        raise endpoints.BadRequestException('Invalid Key')
    # "except ... as e" (valid since Python 2.6) instead of the
    # Python-2-only "except Exception, e" comma form.
    except Exception as e:
        # ProtocolBufferDecodeError is raised deep inside the datastore
        # library; match it by name to avoid importing a private module.
        if e.__class__.__name__ == 'ProtocolBufferDecodeError':
            raise endpoints.BadRequestException('Invalid Key')
        else:
            raise

    entity = key.get()
    if not entity:
        return None
    if not isinstance(entity, model):
        raise ValueError('Incorrect Kind')
    return entity
|
Remove copyright to code not written by bquach.
|
Doc: Remove copyright to code not written by bquach.
|
Python
|
mit
|
brianquach/udacity-nano-fullstack-game
|
---
+++
@@ -1,8 +1,5 @@
#!/usr/bin/env python
"""
-Copyright 2016 Brian Quach
-Licensed under MIT (https://github.com/brianquach/udacity-nano-fullstack-conference/blob/master/LICENSE) # noqa
-
Code Citation:
https://github.com/udacity/FSND-P4-Design-A-Game/blob/master/Skeleton%20Project%20Guess-a-Number/utils.py #noqa
"""
|
a980be7e64193fa29f3913651f48a413c02f5fa3
|
climlab/__init__.py
|
climlab/__init__.py
|
__version__ = '0.3.2'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.domain.initial import column_state, surface_state
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
|
__version__ = '0.4.0'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.domain.initial import column_state, surface_state
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
|
Increment version number to 0.4.0
|
Increment version number to 0.4.0
This should have been done a while ago... many changes including comprehensive documentation, support for lat-lon grids, bug fixes, etc.
|
Python
|
mit
|
cjcardinale/climlab,brian-rose/climlab,cjcardinale/climlab,cjcardinale/climlab,brian-rose/climlab
|
---
+++
@@ -1,4 +1,4 @@
-__version__ = '0.3.2'
+__version__ = '0.4.0'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
|
b7ada8c3a4cacc5b6b91aa5cb91cd57a6ee5566d
|
example.py
|
example.py
|
# -*- coding: utf-8 -*-
"""
Flask-Login example
===================
This is a small application that provides a trivial demonstration of
Flask-Login, including remember me functionality.
:copyright: (C) 2011 by Matthew Frazier.
:license: MIT/X11, see LICENSE for more details.
"""
from flask import Flask
from flask_olinauth import OlinAuth, auth_required, current_user
app = Flask(__name__)
SECRET_KEY = "yeah, not actually a secret"
DEBUG = True
app.config.from_object(__name__)
oa = OlinAuth(app)
#initial OlinAuth, with callback host of localhost:5000
oa.init_app(app, 'localhost:5000')
@app.route("/")
def index():
if current_user:
responseString = "Awesome index, guess what? %s is logged in. Sweet, right?" % current_user['id']
else:
responseString = "It is kind of lonely here... No users are logged in"
return responseString
@app.route("/secret")
@auth_required
def secret():
return "I wouldn't normally show you this, but since %s is logged in, here is the secret: 42" % current_user['id']
if __name__ == "__main__":
app.run(debug=True)
|
# -*- coding: utf-8 -*-
"""
Flask-Login example
===================
This is a small application that provides a trivial demonstration of
Flask-Login, including remember me functionality.
:copyright: (C) 2011 by Matthew Frazier.
:license: MIT/X11, see LICENSE for more details.
"""
from flask import Flask
from flask.exti.olinauth import OlinAuth, auth_required, current_user
app = Flask(__name__)
SECRET_KEY = "yeah, not actually a secret"
DEBUG = True
app.config.from_object(__name__)
oa = OlinAuth(app)
#initial OlinAuth, with callback host of localhost:5000
oa.init_app(app, 'localhost:5000')
@app.route("/")
def index():
if current_user:
responseString = "Awesome index, guess what? %s is logged in. Sweet, right?" % current_user['id']
else:
responseString = "It is kind of lonely here... No users are logged in"
return responseString
@app.route("/secret")
@auth_required
def secret():
return "I wouldn't normally show you this, but since %s is logged in, here is the secret: 42" % current_user['id']
if __name__ == "__main__":
app.run(debug=True)
|
Update import to use flask.ext
|
Update import to use flask.ext
|
Python
|
mit
|
corydolphin/flask-olinauth
|
---
+++
@@ -9,7 +9,7 @@
:license: MIT/X11, see LICENSE for more details.
"""
from flask import Flask
-from flask_olinauth import OlinAuth, auth_required, current_user
+from flask.exti.olinauth import OlinAuth, auth_required, current_user
app = Flask(__name__)
SECRET_KEY = "yeah, not actually a secret"
|
e16911bb965a86fc841b17d4748fdc75d8ed5cf2
|
quizzes.py
|
quizzes.py
|
from database import QuizDB
db = QuizDB(host=config.REDIS_HOST, port=config.REDIS_PORT)
class Quiz(Base):
def __init__(self, id):
self.id = id
QUESTION_HASH = "{0}:question".format(self.id)
ANSWER_HASH = "{0}:answer".format(self.id)
def new_card(self, question, answer):
assert db.hlen(QUESTION_HASH) == db.hlen(ANSWER_HASH)
q_id = max([int(i) for i in db.hkeys(QUESTION_HASH)]) + 1
db.hset(QUESTION_HASH, q_id, question)
db.hset(ANSWER_HASH, q_id, answer)
def delete_card(self, q_id):
db.hdel(QUESTION_HASH, q_id)
db.hdel(ANSWER_HASH, q_id)
|
from database import QuizDB
db = QuizDB(host=config.REDIS_HOST, port=config.REDIS_PORT)
class Quiz(Base):
def __init__(self, id):
self.id = id
QUESTION_HASH = "{0}:question".format(self.id)
ANSWER_HASH = "{0}:answer".format(self.id)
def new_card(self, question, answer):
assert db.hlen(QUESTION_HASH) == db.hlen(ANSWER_HASH)
q_id = max([int(i) for i in db.hkeys(QUESTION_HASH)]) + 1
db.hset(QUESTION_HASH, q_id, question)
db.hset(ANSWER_HASH, q_id, answer)
def delete_card(self, q_id):
db.hdel(QUESTION_HASH, q_id)
db.hdel(ANSWER_HASH, q_id)
def update_question(self, q_id, updated_question):
db.hset(QUESTION_HASH, q_id, updated_question)
def update_answer(self, q_id, updated_answer):
db.hset(ANSWER_HASH, q_id, updated_answer)
|
Add additional CRUD functions to Quiz class
|
Add additional CRUD functions to Quiz class
|
Python
|
bsd-2-clause
|
estreeper/quizalicious,estreeper/quizalicious,estreeper/quizalicious
|
---
+++
@@ -21,3 +21,8 @@
db.hdel(QUESTION_HASH, q_id)
db.hdel(ANSWER_HASH, q_id)
+ def update_question(self, q_id, updated_question):
+ db.hset(QUESTION_HASH, q_id, updated_question)
+
+ def update_answer(self, q_id, updated_answer):
+ db.hset(ANSWER_HASH, q_id, updated_answer)
|
a769c30a874913cbf053c6973b137520dfc58e93
|
stingray/__init__.py
|
stingray/__init__.py
|
# Licensed under MIT license - see LICENSE.rst
"""
This is an Astropy affiliated package.
"""
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
# For egg_info test builds to pass, put package imports here.
if not _ASTROPY_SETUP_:
from stingray.lightcurve import *
from stingray.utils import *
from stingray.powerspectrum import *
|
# Licensed under MIT license - see LICENSE.rst
"""
Library of Time Series Methods For Astronomical X-ray Data.
"""
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import *
# ----------------------------------------------------------------------------
# For egg_info test builds to pass, put package imports here.
if not _ASTROPY_SETUP_:
from stingray.lightcurve import *
from stingray.utils import *
from stingray.powerspectrum import *
|
Remove useless information from library description
|
Remove useless information from library description
|
Python
|
mit
|
evandromr/stingray,abigailStev/stingray,dhuppenkothen/stingray,pabell/stingray,StingraySoftware/stingray
|
---
+++
@@ -1,7 +1,7 @@
# Licensed under MIT license - see LICENSE.rst
"""
-This is an Astropy affiliated package.
+Library of Time Series Methods For Astronomical X-ray Data.
"""
# Affiliated packages may add whatever they like to this file, but
|
c2003d452914a2725e04f58a744bbafe4554dec5
|
holmes/migrations/versions/4d45dd3d8ce5_add_a_flag_for_review_active.py
|
holmes/migrations/versions/4d45dd3d8ce5_add_a_flag_for_review_active.py
|
"""add a flag for review active
Revision ID: 4d45dd3d8ce5
Revises: 49d09b3d2801
Create Date: 2014-02-26 16:29:54.507710
"""
# revision identifiers, used by Alembic.
revision = '4d45dd3d8ce5'
down_revision = '49d09b3d2801'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'violations',
sa.Column('review_is_active', sa.Integer, nullable=False, server_default='1')
)
connection = op.get_bind()
connection.execute('''UPDATE violations
SET review_is_active = 0
WHERE review_id IN (SELECT id FROM reviews WHERE is_active = 0)''')
op.create_index(
'idx_key_domain_review_active',
'violations',
['key_id', 'domain_id', 'review_is_active'])
def downgrade():
op.drop_index('idx_key_domain_review_active', 'violations'),
op.drop_column('violations', 'review_is_active')
|
"""add a flag for review active
Revision ID: 4d45dd3d8ce5
Revises: 49d09b3d2801
Create Date: 2014-02-26 16:29:54.507710
"""
# revision identifiers, used by Alembic.
revision = '4d45dd3d8ce5'
down_revision = '49d09b3d2801'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'violations',
sa.Column('review_is_active', sa.Integer, nullable=False, server_default='1')
)
connection = op.get_bind()
connection.execute('''UPDATE violations
SET review_is_active = 0
WHERE review_id IN (SELECT id FROM reviews WHERE is_active = 0)''')
op.create_index(
'idx_key_domain_review_active',
'violations',
['key_id', 'domain_id', 'review_is_active'])
def downgrade():
op.drop_index('idx_key_domain_review_active', 'violations')
op.drop_column('violations', 'review_is_active')
|
Fix syntax of a migration
|
Fix syntax of a migration
|
Python
|
mit
|
holmes-app/holmes-api,holmes-app/holmes-api
|
---
+++
@@ -32,5 +32,5 @@
def downgrade():
- op.drop_index('idx_key_domain_review_active', 'violations'),
+ op.drop_index('idx_key_domain_review_active', 'violations')
op.drop_column('violations', 'review_is_active')
|
f565b24c7766df2681669abd6a3c2145a4a62853
|
example/deploy.py
|
example/deploy.py
|
from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node
SUDO = True
FAIL_PERCENT = 0
def get_etcd_nodes():
return [
'http://{0}:2379'.format(
etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
['ipv4']['address'],
)
for etcd_node in inventory.get_group('etcd_nodes')
]
# Install/configure etcd cluster
with state.limit('etcd_nodes'):
deploy_etcd()
# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())
# Install/configure the nodes
with state.limit('kubernetes_nodes'):
# Install Docker
deploy_docker(config={
# Make Docker use the Vagrant provided interface which has it's own /24
'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
})
# Install Kubernetes node components (kubelet, kube-proxy)
first_master = inventory.get_group('kubernetes_masters')[0]
deploy_kubernetes_node(
master_address='http://{0}'.format((
first_master
.fact.network_devices[first_master.data.network_interface]
['ipv4']['address']
)),
)
|
from pyinfra import inventory, state
from pyinfra_docker import deploy_docker
from pyinfra_etcd import deploy_etcd
from pyinfra_kubernetes import deploy_kubernetes_master, deploy_kubernetes_node
SUDO = True
FAIL_PERCENT = 0
def get_etcd_nodes():
return [
'http://{0}:2379'.format(
etcd_node.fact.network_devices[etcd_node.data.etcd_interface]
['ipv4']['address'],
)
for etcd_node in inventory.get_group('etcd_nodes')
]
# Install/configure etcd cluster
with state.limit('etcd_nodes'):
deploy_etcd()
# Install/configure the masters (apiserver, controller, scheduler)
with state.limit('kubernetes_masters'):
deploy_kubernetes_master(etcd_nodes=get_etcd_nodes())
# Install/configure the nodes
with state.limit('kubernetes_nodes'):
# Install Docker
deploy_docker(config={
# Make Docker use the Vagrant provided interface which has it's own /24
'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}/24',
})
# Install Kubernetes node components (kubelet, kube-proxy)
first_master = inventory.get_group('kubernetes_masters')[0]
deploy_kubernetes_node(
master_address='http://{0}'.format((
first_master
.fact.network_devices[first_master.data.network_interface]
['ipv4']['address']
)),
)
|
Include mask size on docker CIDR.
|
Include mask size on docker CIDR.
|
Python
|
mit
|
EDITD/pyinfra-kubernetes,EDITD/pyinfra-kubernetes
|
---
+++
@@ -33,7 +33,7 @@
# Install Docker
deploy_docker(config={
# Make Docker use the Vagrant provided interface which has it's own /24
- 'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}',
+ 'bip': '{{ host.fact.network_devices[host.data.network_interface].ipv4.address }}/24',
})
# Install Kubernetes node components (kubelet, kube-proxy)
|
9b820aff6fc64ffa750dbf92a51d754f9c55ab79
|
froide/publicbody/search_indexes.py
|
froide/publicbody/search_indexes.py
|
from __future__ import print_function
from django.conf import settings
from haystack import indexes
from celery_haystack.indexes import CelerySearchIndex
from .models import PublicBody
PUBLIC_BODY_BOOSTS = settings.FROIDE_CONFIG.get("public_body_boosts", {})
class PublicBodyIndex(CelerySearchIndex, indexes.Indexable):
text = indexes.EdgeNgramField(document=True, use_template=True)
name = indexes.CharField(model_attr='name', boost=1.5)
jurisdiction = indexes.CharField(model_attr='jurisdiction__name', default='')
topic_auto = indexes.EdgeNgramField(model_attr='topic_name')
topic_slug = indexes.CharField(model_attr='topic__slug')
name_auto = indexes.EdgeNgramField(model_attr='name')
url = indexes.CharField(model_attr='get_absolute_url')
def get_model(self):
return PublicBody
def index_queryset(self, **kwargs):
"""Used when the entire index for model is updated."""
return self.get_model().objects.get_for_search_index()
def prepare(self, obj):
data = super(PublicBodyIndex, self).prepare(obj)
if obj.classification in PUBLIC_BODY_BOOSTS:
data['boost'] = PUBLIC_BODY_BOOSTS[obj.classification]
print("Boosting %s at %f" % (obj, data['boost']))
return data
|
from __future__ import print_function
from django.conf import settings
from haystack import indexes
from celery_haystack.indexes import CelerySearchIndex
from .models import PublicBody
PUBLIC_BODY_BOOSTS = settings.FROIDE_CONFIG.get("public_body_boosts", {})
class PublicBodyIndex(CelerySearchIndex, indexes.Indexable):
text = indexes.EdgeNgramField(document=True, use_template=True)
name = indexes.CharField(model_attr='name', boost=1.5)
jurisdiction = indexes.CharField(model_attr='jurisdiction__name', default='')
topic_auto = indexes.EdgeNgramField(model_attr='topic__name', default='')
topic_slug = indexes.CharField(model_attr='topic__slug', default='')
name_auto = indexes.EdgeNgramField(model_attr='name')
url = indexes.CharField(model_attr='get_absolute_url')
def get_model(self):
return PublicBody
def index_queryset(self, **kwargs):
"""Used when the entire index for model is updated."""
return self.get_model().objects.get_for_search_index()
def prepare(self, obj):
data = super(PublicBodyIndex, self).prepare(obj)
if obj.classification in PUBLIC_BODY_BOOSTS:
data['boost'] = PUBLIC_BODY_BOOSTS[obj.classification]
print("Boosting %s at %f" % (obj, data['boost']))
return data
|
Make topic_* field on public body search index optional
|
Make topic_* field on public body search index optional
|
Python
|
mit
|
okfse/froide,ryankanno/froide,fin/froide,catcosmo/froide,ryankanno/froide,stefanw/froide,okfse/froide,catcosmo/froide,okfse/froide,ryankanno/froide,stefanw/froide,ryankanno/froide,stefanw/froide,stefanw/froide,okfse/froide,okfse/froide,LilithWittmann/froide,catcosmo/froide,CodeforHawaii/froide,fin/froide,LilithWittmann/froide,catcosmo/froide,LilithWittmann/froide,CodeforHawaii/froide,LilithWittmann/froide,ryankanno/froide,catcosmo/froide,CodeforHawaii/froide,fin/froide,CodeforHawaii/froide,LilithWittmann/froide,CodeforHawaii/froide,stefanw/froide,fin/froide
|
---
+++
@@ -14,8 +14,8 @@
text = indexes.EdgeNgramField(document=True, use_template=True)
name = indexes.CharField(model_attr='name', boost=1.5)
jurisdiction = indexes.CharField(model_attr='jurisdiction__name', default='')
- topic_auto = indexes.EdgeNgramField(model_attr='topic_name')
- topic_slug = indexes.CharField(model_attr='topic__slug')
+ topic_auto = indexes.EdgeNgramField(model_attr='topic__name', default='')
+ topic_slug = indexes.CharField(model_attr='topic__slug', default='')
name_auto = indexes.EdgeNgramField(model_attr='name')
url = indexes.CharField(model_attr='get_absolute_url')
|
d8a36da519dd5b5659777e2b92564569a3dfb9f8
|
test/test_get_new.py
|
test/test_get_new.py
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
import sys
import pytest
@pytest.mark.trylast
@needinternet
def test_check_get_new(fixture_update_dir):
"""Test that gets new version from internet"""
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
import sys
import pytest
@needinternet
def test_check_get_new(fixture_update_dir):
"""Test that gets new version from internet"""
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
@needinternet
def test_check_get_invalid_archive(fixture_update_dir):
"""Test that gets new version from internet"""
package=fixture_update_dir("0.0.1")
launch = Launcher('what file? hahahaha',
r'http://rlee287.github.io/pyautoupdate/testing2/',
newfiles="project.tar.gz")
launch._get_new()
assert os.path.isfile("project.tar.gz.dump")
os.remove("project.tar.gz.dump")
|
Write test that attempts to unpack invalid archive
|
Write test that attempts to unpack invalid archive
|
Python
|
lgpl-2.1
|
rlee287/pyautoupdate,rlee287/pyautoupdate
|
---
+++
@@ -9,7 +9,6 @@
import pytest
-@pytest.mark.trylast
@needinternet
def test_check_get_new(fixture_update_dir):
"""Test that gets new version from internet"""
@@ -20,3 +19,14 @@
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
+
+@needinternet
+def test_check_get_invalid_archive(fixture_update_dir):
+ """Test that gets new version from internet"""
+ package=fixture_update_dir("0.0.1")
+ launch = Launcher('what file? hahahaha',
+ r'http://rlee287.github.io/pyautoupdate/testing2/',
+ newfiles="project.tar.gz")
+ launch._get_new()
+ assert os.path.isfile("project.tar.gz.dump")
+ os.remove("project.tar.gz.dump")
|
75add42972c059a00b01fe2b4eeb716d905a3bd6
|
mamba/cli.py
|
mamba/cli.py
|
# -*- coding: utf-8 -*-
import sys
import imp
from mamba import formatters
from mamba.runner import Runner
def main():
formatter = formatters.DocumentationFormatter()
runner = Runner(formatter)
for file_ in sys.argv[1:]:
module = imp.load_source(file_.replace('.py', ''), file_)
runner.run(module)
formatter.format_summary()
if runner.has_failed_specs:
sys.exit(1)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import sys
import os
import imp
from mamba import formatters
from mamba.runner import Runner
def main():
formatter = formatters.DocumentationFormatter()
runner = Runner(formatter)
for file_ in _specs():
module = imp.load_source(file_.replace('.py', ''), file_)
runner.run(module)
formatter.format_summary()
if runner.has_failed_specs:
sys.exit(1)
def _specs():
if len(sys.argv) == 1:
collected = []
for root, dirs, files in os.walk('spec'):
collected.extend([os.path.join(root, file_) for file_ in files if file_.endswith('_spec.py')])
collected.sort()
return collected
else:
return sys.argv[1:]
if __name__ == '__main__':
main()
|
Load specs from spec/**/*_spec.py if no spec was specified
|
Load specs from spec/**/*_spec.py if no spec was specified
|
Python
|
mit
|
angelsanz/mamba,eferro/mamba,alejandrodob/mamba,dex4er/mamba,nestorsalceda/mamba,markng/mamba,jaimegildesagredo/mamba
|
---
+++
@@ -1,6 +1,7 @@
# -*- coding: utf-8 -*-
import sys
+import os
import imp
from mamba import formatters
@@ -11,7 +12,7 @@
formatter = formatters.DocumentationFormatter()
runner = Runner(formatter)
- for file_ in sys.argv[1:]:
+ for file_ in _specs():
module = imp.load_source(file_.replace('.py', ''), file_)
runner.run(module)
@@ -21,5 +22,16 @@
sys.exit(1)
+def _specs():
+ if len(sys.argv) == 1:
+ collected = []
+ for root, dirs, files in os.walk('spec'):
+ collected.extend([os.path.join(root, file_) for file_ in files if file_.endswith('_spec.py')])
+ collected.sort()
+ return collected
+ else:
+ return sys.argv[1:]
+
+
if __name__ == '__main__':
main()
|
3d1c4c3bd3dd6ae48e75772a2f2706d6104d189c
|
googkit.py
|
googkit.py
|
import os
import sys
from commands.apply_config import ApplyConfigCommand
from commands.compile import CompileCommand
from commands.init import InitCommand
from commands.setup import SetupCommand
from commands.update_deps import UpdateDepsCommand
from lib.config import Config
from lib.error import GoogkitError
CONFIG = 'googkit.cfg'
COMMANDS_DICT = {
'apply-config': [ApplyConfigCommand, UpdateDepsCommand],
'compile': [CompileCommand],
'init': [InitCommand],
'setup': [SetupCommand, UpdateDepsCommand],
'update-deps': [UpdateDepsCommand]}
def print_help():
print('Usage: googkit command')
print('')
print('Available subcommands:')
for name in sorted(COMMANDS_DICT.keys()):
print(' ' + name)
if __name__ == '__main__':
if len(sys.argv) != 2:
print_help()
sys.exit()
subcommand_classes = COMMANDS_DICT.get(sys.argv[1])
if subcommand_classes is None:
print_help()
sys.exit()
config = Config()
try:
config.load(CONFIG)
except IOError:
config = None
try:
for klass in subcommand_classes:
subcommand = klass(config)
subcommand.run()
except GoogkitError, e:
sys.exit('[ERROR] ' + str(e))
|
import os
import sys
from commands.apply_config import ApplyConfigCommand
from commands.compile import CompileCommand
from commands.init import InitCommand
from commands.setup import SetupCommand
from commands.update_deps import UpdateDepsCommand
from lib.config import Config
from lib.error import GoogkitError
CONFIG = 'googkit.cfg'
COMMANDS_DICT = {
'apply-config': [ApplyConfigCommand, UpdateDepsCommand],
'compile': [CompileCommand],
'init': [InitCommand],
'setup': [SetupCommand, UpdateDepsCommand],
'update-deps': [UpdateDepsCommand]}
def print_help():
print('Usage: googkit command')
print('')
print('Available subcommands:')
for name in sorted(COMMANDS_DICT.keys()):
print(' ' + name)
if __name__ == '__main__':
if len(sys.argv) != 2:
print_help()
sys.exit()
subcommand_classes = COMMANDS_DICT.get(sys.argv[1])
if subcommand_classes is None:
print_help()
sys.exit()
config = Config()
try:
while os.path.exists(os.path.relpath(CONFIG)):
before = os.getcwd()
os.chdir('..')
# Break if current dir is root.
if before == os.getcwd():
break
config.load(CONFIG)
except IOError:
config = None
try:
for klass in subcommand_classes:
subcommand = klass(config)
subcommand.run()
except GoogkitError, e:
sys.exit('[ERROR] ' + str(e))
|
Support making available to exec cmd on sub dir
|
Support making available to exec cmd on sub dir
|
Python
|
mit
|
googkit/googkit,googkit/googkit,googkit/googkit
|
---
+++
@@ -39,6 +39,14 @@
config = Config()
try:
+ while os.path.exists(os.path.relpath(CONFIG)):
+ before = os.getcwd()
+ os.chdir('..')
+
+ # Break if current dir is root.
+ if before == os.getcwd():
+ break
+
config.load(CONFIG)
except IOError:
config = None
|
f00eeefd5c7b13c8fdff2ff213a50f2c13423073
|
example.py
|
example.py
|
#!/usr/bin/env python3
import numpy as np
import copy as cp
import rust_sorting as rs
N = 5
max_val = 10.0
dtypes = [np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64,
np.float32, np.float64]
for dtype in dtypes:
# print("dtype:", dtype)
array = np.array(max_val*np.random.rand(N), dtype=dtype)
orig_array = cp.deepcopy(array)
# print("Python: to sort =", repr(array))
# rs.sort(array)
# rs.insertionsort(array)
# rs.selectionsort(array)
rs.quicksort(array)
# print("Python: sorted =", repr(array))
assert((array == np.sort(orig_array)).all())
print("Python done!")
|
#!/usr/bin/env python3
import numpy as np
import copy as cp
import rust_sorting as rs
N = 5
max_val = 10.0
dtypes = [np.int8, np.int16, np.int32, np.int64,
np.uint8, np.uint16, np.uint32, np.uint64,
np.float32, np.float64]
for dtype in dtypes:
# print("dtype:", dtype)
array = np.array(max_val*(np.random.rand(N) - 0.5), dtype=dtype)
orig_array = cp.deepcopy(array)
# print("Python: to sort =", repr(array))
# rs.sort(array)
# rs.insertionsort(array)
# rs.selectionsort(array)
rs.quicksort(array)
# print("Python: sorted =", repr(array))
assert((array == np.sort(orig_array)).all())
print("Python done!")
|
Change random range to be between -max_val/2 and max_val for negative numbers.
|
Change random range to be between -max_val/2 and max_val for negative numbers.
|
Python
|
bsd-3-clause
|
nbigaouette/rust-sorting,nbigaouette/rust-sorting,nbigaouette/rust-sorting
|
---
+++
@@ -15,7 +15,7 @@
for dtype in dtypes:
# print("dtype:", dtype)
- array = np.array(max_val*np.random.rand(N), dtype=dtype)
+ array = np.array(max_val*(np.random.rand(N) - 0.5), dtype=dtype)
orig_array = cp.deepcopy(array)
|
f1a5c564d56996d023cc891f56c28000ba24df7f
|
src/client.py
|
src/client.py
|
#!/usr/bin/env python3
import socket
import time
import math
import random
HOST, PORT = 'localhost', 7777
LIMIT = 0.5
posx, posy, posz = 0.0, 0.0, 0.0
def change_pos(*values):
range_delta = 0.1
output = []
for pos in values:
pos_min = pos - range_delta
pos_min = -0.5 if pos_min < -0.5 else pos_min
pos_max = pos + range_delta
pos_max = 0.5 if pos_max > 0.5 else pos_max
output.append(round(random.uniform(pos_min, pos_max), 2))
return output
num = 1
while True:
with socket.socket() as sock:
sock.connect((HOST, PORT))
data = ';'.join([str(posx), str(posy), str(posz)])
sock.sendall(bytes(data, 'utf-8'))
time.sleep(0.5)
posx, posy, posz = change_pos(posx, posy, posz)
num += 1
|
#!/usr/bin/env python3
import socket
import time
import math
import random
HOST, PORT = 'localhost', 7777
LIMIT = 0.5
posx = random.uniform(-50.00, 50.00)
posy = random.uniform(-50.00, 50.00)
posz = random.uniform(-50.00, 50.00)
def change_pos(*values):
range_delta = 0.1
output = []
for pos in values:
pos_min = pos - range_delta
pos_min = -0.5 if pos_min < -0.5 else pos_min
pos_max = pos + range_delta
pos_max = 0.5 if pos_max > 0.5 else pos_max
output.append(round(random.uniform(pos_min, pos_max), 2))
return output
num = 1
while True:
with socket.socket() as sock:
sock.connect((HOST, PORT))
data = ';'.join([str(posx), str(posy), str(posz)])
sock.sendall(bytes(data, 'utf-8'))
time.sleep(0.5)
posx, posy, posz = change_pos(posx, posy, posz)
num += 1
|
Use better start values for arm
|
Use better start values for arm
|
Python
|
mit
|
saleone/bachelor-thesis
|
---
+++
@@ -8,7 +8,9 @@
LIMIT = 0.5
-posx, posy, posz = 0.0, 0.0, 0.0
+posx = random.uniform(-50.00, 50.00)
+posy = random.uniform(-50.00, 50.00)
+posz = random.uniform(-50.00, 50.00)
def change_pos(*values):
range_delta = 0.1
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.