commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
355b78dfa8be2000a1e3198231088c7b3cec0d18 | alexandria/__init__.py | alexandria/__init__.py | import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.include('.session')
config.include('.security')
config.add_static_view('css', 'alexandria:static/css', cache_max_age=3600)
config.add_static_view('js', 'alexandria:static/js', cache_max_age=3600)
config.add_static_view('static', 'alexandria:static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
use_global_views=True
)
# Scan the views sub-module
config.scan('.views')
return config.make_wsgi_app()
| import logging
log = logging.getLogger(__name__)
from pyramid.config import Configurator
from sqlalchemy import engine_from_config
from .models import DBSession
required_settings = [
'pyramid.secret.session',
'pyramid.secret.auth',
]
def main(global_config, **settings):
""" This function returns a Pyramid WSGI application.
"""
engine = engine_from_config(settings, 'sqlalchemy.')
DBSession.configure(bind=engine)
config = Configurator(settings=settings)
do_start = True
for _req in required_settings:
if _req not in settings:
log.error('{} is not set in configuration file.'.format(_req))
do_start = False
if do_start is False:
log.error('Unable to start due to missing configuration')
exit(-1)
# Include the transaction manager
config.include('pyramid_tm')
config.include('.session')
config.include('.security')
config.add_static_view('css', 'alexandria:static/css', cache_max_age=3600)
config.add_static_view('js', 'alexandria:static/js', cache_max_age=3600)
config.add_static_view('static', 'alexandria:static', cache_max_age=3600)
config.add_route('main',
'/*traverse',
factory='.traversal.Root',
use_global_views=True
)
# Scan the views sub-module
config.scan('.views')
return config.make_wsgi_app()
| Set up the factory for the default route | Set up the factory for the default route
| Python | isc | bertjwregeer/alexandria,bertjwregeer/alexandria,cdunklau/alexandria,cdunklau/alexandria,cdunklau/alexandria | ---
+++
@@ -41,6 +41,7 @@
config.add_route('main',
'/*traverse',
+ factory='.traversal.Root',
use_global_views=True
)
|
fb13bc1a5735912b28026a80f7698cb0c299151f | tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py | tools/chrome_proxy/integration_tests/chrome_proxy_pagesets/reenable_after_bypass.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry import story
class ReenableAfterBypassPage(page_module.Page):
"""A test page for the re-enable after bypass tests.
Attributes:
bypass_seconds_min: The minimum number of seconds that the bypass
triggered by loading this page should last.
bypass_seconds_max: The maximum number of seconds that the bypass
triggered by loading this page should last.
"""
def __init__(self,
url,
page_set,
bypass_seconds_min,
bypass_seconds_max):
super(ReenableAfterBypassPage, self).__init__(url=url, page_set=page_set)
self.bypass_seconds_min = bypass_seconds_min
self.bypass_seconds_max = bypass_seconds_max
class ReenableAfterBypassStorySet(story.StorySet):
""" Chrome proxy test sites """
def __init__(self):
super(ReenableAfterBypassStorySet, self).__init__()
# Test page for "Chrome-Proxy: block=0". Loading this page should cause all
# data reduction proxies to be bypassed for one to five minutes.
self.AddStory(ReenableAfterBypassPage(
url="http://check.googlezip.net/block",
page_set=self,
bypass_seconds_min=60,
bypass_seconds_max=300))
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry.page import page as page_module
from telemetry import story
class ReenableAfterBypassPage(page_module.Page):
"""A test page for the re-enable after bypass tests.
Attributes:
bypass_seconds_min: The minimum number of seconds that the bypass
triggered by loading this page should last.
bypass_seconds_max: The maximum number of seconds that the bypass
triggered by loading this page should last.
"""
def __init__(self,
url,
page_set,
bypass_seconds_min,
bypass_seconds_max):
super(ReenableAfterBypassPage, self).__init__(url=url, page_set=page_set)
self.bypass_seconds_min = bypass_seconds_min
self.bypass_seconds_max = bypass_seconds_max
class ReenableAfterBypassStorySet(story.StorySet):
""" Chrome proxy test sites """
def __init__(self):
super(ReenableAfterBypassStorySet, self).__init__()
# Test page for "Chrome-Proxy: block=0". Loading this page should cause all
# data reduction proxies to be bypassed for one to five minutes.
self.AddStory(ReenableAfterBypassPage(
url="http://check.googlezip.net/block/",
page_set=self,
bypass_seconds_min=60,
bypass_seconds_max=300))
| Add trailing slash to chrome_proxy telemetry test page URL. | Add trailing slash to chrome_proxy telemetry test page URL.
BUG=507797
Review URL: https://codereview.chromium.org/1229563002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#337895}
| Python | bsd-3-clause | lihui7115/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,lihui7115/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,lihui7115/ChromiumGStreamerBackend,lihui7115/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,lihui7115/ChromiumGStreamerBackend,lihui7115/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,lihui7115/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,lihui7115/ChromiumGStreamerBackend,lihui7115/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,ltilve/ChromiumGStreamerBackend,CapOM/ChromiumGStreamerBackend | ---
+++
@@ -35,7 +35,7 @@
# Test page for "Chrome-Proxy: block=0". Loading this page should cause all
# data reduction proxies to be bypassed for one to five minutes.
self.AddStory(ReenableAfterBypassPage(
- url="http://check.googlezip.net/block",
+ url="http://check.googlezip.net/block/",
page_set=self,
bypass_seconds_min=60,
bypass_seconds_max=300)) |
b785fdc041a41f06c10e08d1e4e4b2bd4a5e5f90 | tests/test_uploader.py | tests/test_uploader.py | """Tests for the uploader module"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import open
from future import standard_library
standard_library.install_aliases()
from os.path import join
from datapackage import DataPackage
from pytest import fixture
from json import loads
from gobble.config import ASSETS_DIR
from gobble.uploader import Uploader
from gobble.user import User
@fixture
def user():
return User()
@fixture
def package():
filepath = join(ASSETS_DIR, 'mexican-budget-samples', 'datapackage.json')
return DataPackage(filepath)
# noinspection PyShadowingNames
def test_build_payloads(user, package):
uploader = Uploader(user, package)
expected = join(ASSETS_DIR, 'mexican-budget-samples', 'payload.json')
with open(expected) as json:
assert uploader.payload == loads(json.read())
| """Tests for the uploader module"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from datapackage import DataPackage
from future import standard_library
from gobble.user import User
standard_library.install_aliases()
from json import loads
from io import open
from gobble.uploader import Uploader
# noinspection PyUnresolvedReferences
from tests.fixtures import (dummy_requests,
ROOT_DIR,
PACKAGE_FILE,
UPLOADER_PAYLOAD)
# noinspection PyShadowingNames
def test_build_payloads(dummy_requests):
with dummy_requests:
user = User()
package = DataPackage(PACKAGE_FILE)
uploader = Uploader(user, package)
with open(UPLOADER_PAYLOAD) as json:
assert uploader.payload == loads(json.read())
| Refactor uploader tests to use the fixture module. | Refactor uploader tests to use the fixture module.
| Python | mit | openspending/gobble | ---
+++
@@ -1,38 +1,33 @@
"""Tests for the uploader module"""
+
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
-from builtins import open
+from datapackage import DataPackage
from future import standard_library
+
+from gobble.user import User
standard_library.install_aliases()
-from os.path import join
-from datapackage import DataPackage
-from pytest import fixture
from json import loads
+from io import open
-from gobble.config import ASSETS_DIR
from gobble.uploader import Uploader
-from gobble.user import User
-
-
-@fixture
-def user():
- return User()
-
-
-@fixture
-def package():
- filepath = join(ASSETS_DIR, 'mexican-budget-samples', 'datapackage.json')
- return DataPackage(filepath)
+# noinspection PyUnresolvedReferences
+from tests.fixtures import (dummy_requests,
+ ROOT_DIR,
+ PACKAGE_FILE,
+ UPLOADER_PAYLOAD)
# noinspection PyShadowingNames
-def test_build_payloads(user, package):
- uploader = Uploader(user, package)
- expected = join(ASSETS_DIR, 'mexican-budget-samples', 'payload.json')
- with open(expected) as json:
- assert uploader.payload == loads(json.read())
+def test_build_payloads(dummy_requests):
+ with dummy_requests:
+ user = User()
+ package = DataPackage(PACKAGE_FILE)
+ uploader = Uploader(user, package)
+ with open(UPLOADER_PAYLOAD) as json:
+ assert uploader.payload == loads(json.read()) |
a80d20fd0419e7e6aa9a9d83145fcab2987e286c | deploy/fabfile.py | deploy/fabfile.py | from fabric.api import env, local, run, sudo
env.user = 'root'
#env.hosts = ['204.232.205.6']
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
env.chef_executable = '/var/lib/gems/1.8/bin/chef-solo'
def install_chef():
sudo('apt-get update', pty=True)
sudo('apt-get install -y git-core libopenssl-ruby rubygems ruby ruby-dev', pty=True)
#sudo('gem install chef --no-ri --no-rdoc', pty=True)
sudo('gem install chef --no-ri --no-rdoc -V 0.10.2', pty=True)
def sync_config():
local('rsync -av . %s@%s:/etc/chef' % (env.user, env.hosts[0]))
def update():
sync_config()
sudo('cd /etc/chef && %s' % env.chef_executable, pty=True)
def reload():
"Reload the server."
env.user = "docs"
run("kill -HUP `cat %s/gunicorn.pid`" % env.rundir, pty=True)
def restart():
"Restart (or just start) the server"
sudo('restart readthedocs-gunicorn', pty=True)
| from fabric.api import env, local, run, sudo
env.user = 'root'
#env.hosts = ['204.232.205.6']
env.code_dir = '/home/docs/sites/readthedocs.org/checkouts/readthedocs.org'
env.virtualenv = '/home/docs/sites/readthedocs.org'
env.rundir = '/home/docs/sites/readthedocs.org/run'
env.chef_executable = '/var/lib/gems/1.8/bin/chef-solo'
def install_chef():
sudo('apt-get update', pty=True)
sudo('apt-get install -y git-core libopenssl-ruby rubygems ruby ruby-dev', pty=True)
#sudo('gem install chef --no-ri --no-rdoc', pty=True)
sudo('gem install chef --no-ri --no-rdoc --version=0.10.2', pty=True)
def sync_config():
local('rsync -av . %s@%s:/etc/chef' % (env.user, env.hosts[0]))
def update():
sync_config()
sudo('cd /etc/chef && %s' % env.chef_executable, pty=True)
def reload():
"Reload the server."
env.user = "docs"
run("kill -HUP `cat %s/gunicorn.pid`" % env.rundir, pty=True)
def restart():
"Restart (or just start) the server"
sudo('restart readthedocs-gunicorn', pty=True)
| Make sure to use the same version of chef as the vagrant | Make sure to use the same version of chef as the vagrant
| Python | mit | tddv/readthedocs.org,sils1297/readthedocs.org,tddv/readthedocs.org,sunnyzwh/readthedocs.org,asampat3090/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,raven47git/readthedocs.org,emawind84/readthedocs.org,kdkeyser/readthedocs.org,kenwang76/readthedocs.org,techtonik/readthedocs.org,d0ugal/readthedocs.org,davidfischer/readthedocs.org,istresearch/readthedocs.org,cgourlay/readthedocs.org,takluyver/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,stevepiercy/readthedocs.org,pombredanne/readthedocs.org,sunnyzwh/readthedocs.org,safwanrahman/readthedocs.org,attakei/readthedocs-oauth,sid-kap/readthedocs.org,techtonik/readthedocs.org,LukasBoersma/readthedocs.org,attakei/readthedocs-oauth,kenshinthebattosai/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,cgourlay/readthedocs.org,dirn/readthedocs.org,CedarLogic/readthedocs.org,espdev/readthedocs.org,raven47git/readthedocs.org,LukasBoersma/readthedocs.org,nikolas/readthedocs.org,attakei/readthedocs-oauth,kenwang76/readthedocs.org,sils1297/readthedocs.org,takluyver/readthedocs.org,KamranMackey/readthedocs.org,wanghaven/readthedocs.org,michaelmcandrew/readthedocs.org,gjtorikian/readthedocs.org,clarkperkins/readthedocs.org,safwanrahman/readthedocs.org,Carreau/readthedocs.org,atsuyim/readthedocs.org,royalwang/readthedocs.org,rtfd/readthedocs.org,dirn/readthedocs.org,titiushko/readthedocs.org,hach-que/readthedocs.org,KamranMackey/readthedocs.org,safwanrahman/readthedocs.org,LukasBoersma/readthedocs.org,mhils/readthedocs.org,hach-que/readthedocs.org,nyergler/pythonslides,CedarLogic/readthedocs.org,mhils/readthedocs.org,mrshoki/readthedocs.org,SteveViss/readthedocs.org,rtfd/readthedocs.org,jerel/readthedocs.org,fujita-shintaro/readthedocs.org,espdev/readthedocs.org,jerel/readthedocs.org,kdkeyser/readthedocs.org,safwanrahman/readthedocs.org,sid-kap/readthedocs.org,dirn/readthedocs.org,d0ugal/readthedocs.org,istresearch/readthedocs.org,techtonik/readthedocs.org,roy
alwang/readthedocs.org,kenwang76/readthedocs.org,michaelmcandrew/readthedocs.org,soulshake/readthedocs.org,nikolas/readthedocs.org,istresearch/readthedocs.org,royalwang/readthedocs.org,Tazer/readthedocs.org,ojii/readthedocs.org,raven47git/readthedocs.org,VishvajitP/readthedocs.org,mrshoki/readthedocs.org,atsuyim/readthedocs.org,johncosta/private-readthedocs.org,nyergler/pythonslides,wanghaven/readthedocs.org,d0ugal/readthedocs.org,GovReady/readthedocs.org,sils1297/readthedocs.org,nyergler/pythonslides,emawind84/readthedocs.org,GovReady/readthedocs.org,takluyver/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,fujita-shintaro/readthedocs.org,asampat3090/readthedocs.org,CedarLogic/readthedocs.org,singingwolfboy/readthedocs.org,Tazer/readthedocs.org,cgourlay/readthedocs.org,kenshinthebattosai/readthedocs.org,stevepiercy/readthedocs.org,michaelmcandrew/readthedocs.org,wijerasa/readthedocs.org,stevepiercy/readthedocs.org,mhils/readthedocs.org,espdev/readthedocs.org,agjohnson/readthedocs.org,asampat3090/readthedocs.org,nikolas/readthedocs.org,soulshake/readthedocs.org,GovReady/readthedocs.org,ojii/readthedocs.org,fujita-shintaro/readthedocs.org,VishvajitP/readthedocs.org,jerel/readthedocs.org,dirn/readthedocs.org,johncosta/private-readthedocs.org,LukasBoersma/readthedocs.org,Tazer/readthedocs.org,titiushko/readthedocs.org,asampat3090/readthedocs.org,mrshoki/readthedocs.org,rtfd/readthedocs.org,titiushko/readthedocs.org,laplaceliu/readthedocs.org,agjohnson/readthedocs.org,singingwolfboy/readthedocs.org,kenshinthebattosai/readthedocs.org,gjtorikian/readthedocs.org,VishvajitP/readthedocs.org,tddv/readthedocs.org,mhils/readthedocs.org,johncosta/private-readthedocs.org,takluyver/readthedocs.org,fujita-shintaro/readthedocs.org,laplaceliu/readthedocs.org,Tazer/readthedocs.org,KamranMackey/readthedocs.org,rtfd/readthedocs.org,stevepiercy/readthedocs.org,emawind84/readthedocs.org,KamranMackey/readthedocs.org,mrshoki/readthedocs.org,laplaceliu/readthedocs.org,gjtori
kian/readthedocs.org,cgourlay/readthedocs.org,pombredanne/readthedocs.org,GovReady/readthedocs.org,VishvajitP/readthedocs.org,singingwolfboy/readthedocs.org,agjohnson/readthedocs.org,raven47git/readthedocs.org,ojii/readthedocs.org,davidfischer/readthedocs.org,clarkperkins/readthedocs.org,espdev/readthedocs.org,clarkperkins/readthedocs.org,wanghaven/readthedocs.org,pombredanne/readthedocs.org,atsuyim/readthedocs.org,nyergler/pythonslides,wijerasa/readthedocs.org,nikolas/readthedocs.org,sid-kap/readthedocs.org,sils1297/readthedocs.org,Carreau/readthedocs.org,SteveViss/readthedocs.org,soulshake/readthedocs.org,Carreau/readthedocs.org,sid-kap/readthedocs.org,kdkeyser/readthedocs.org,gjtorikian/readthedocs.org,soulshake/readthedocs.org,d0ugal/readthedocs.org,royalwang/readthedocs.org,wijerasa/readthedocs.org,atsuyim/readthedocs.org,hach-que/readthedocs.org,SteveViss/readthedocs.org,sunnyzwh/readthedocs.org,kdkeyser/readthedocs.org,agjohnson/readthedocs.org,ojii/readthedocs.org,laplaceliu/readthedocs.org,Carreau/readthedocs.org,emawind84/readthedocs.org,davidfischer/readthedocs.org,kenwang76/readthedocs.org,SteveViss/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,sunnyzwh/readthedocs.org,kenshinthebattosai/readthedocs.org,wanghaven/readthedocs.org,michaelmcandrew/readthedocs.org,techtonik/readthedocs.org,davidfischer/readthedocs.org | ---
+++
@@ -13,7 +13,7 @@
sudo('apt-get update', pty=True)
sudo('apt-get install -y git-core libopenssl-ruby rubygems ruby ruby-dev', pty=True)
#sudo('gem install chef --no-ri --no-rdoc', pty=True)
- sudo('gem install chef --no-ri --no-rdoc -V 0.10.2', pty=True)
+ sudo('gem install chef --no-ri --no-rdoc --version=0.10.2', pty=True)
def sync_config(): |
69d013b768edabb6bc7dbe78b1f219ea1b49db16 | sqlitebiter/_config.py | sqlitebiter/_config.py | #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import appconfigpy
from ._const import PROGRAM_NAME
class ConfigKey(object):
PROXY_SERVER = "proxy_server"
GS_CREDENTIALS_FILE_PATH = "gs_credentials_file_path"
app_config_manager = appconfigpy.ConfigManager(
config_name=PROGRAM_NAME,
config_item_list=[
appconfigpy.ConfigItem(
name=ConfigKey.PROXY_SERVER,
prompt_text="HTTP/HTTPS proxy server URI",
initial_value="",
),
# appconfigpy.ConfigItem(
# name="gs_credentials_file_path",
# prompt_text="Google Sheets credentials file path",
# initial_value="",
# ),
])
| #!/usr/bin/env python
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
import appconfigpy
from ._const import PROGRAM_NAME
class ConfigKey(object):
DEFAULT_ENCODING = "default_encoding"
PROXY_SERVER = "proxy_server"
GS_CREDENTIALS_FILE_PATH = "gs_credentials_file_path"
app_config_manager = appconfigpy.ConfigManager(
config_name=PROGRAM_NAME,
config_item_list=[
appconfigpy.ConfigItem(
name=ConfigKey.DEFAULT_ENCODING,
prompt_text="Default encoding to load files",
initial_value="utf-8"),
appconfigpy.ConfigItem(
name=ConfigKey.PROXY_SERVER,
prompt_text="HTTP/HTTPS proxy server URI",
initial_value=""),
# appconfigpy.ConfigItem(
# name="gs_credentials_file_path",
# prompt_text="Google Sheets credentials file path",
# initial_value="",
# ),
])
| Add default encoding parameter to configure subcommand | Add default encoding parameter to configure subcommand
| Python | mit | thombashi/sqlitebiter,thombashi/sqlitebiter | ---
+++
@@ -14,6 +14,7 @@
class ConfigKey(object):
+ DEFAULT_ENCODING = "default_encoding"
PROXY_SERVER = "proxy_server"
GS_CREDENTIALS_FILE_PATH = "gs_credentials_file_path"
@@ -22,10 +23,13 @@
config_name=PROGRAM_NAME,
config_item_list=[
appconfigpy.ConfigItem(
+ name=ConfigKey.DEFAULT_ENCODING,
+ prompt_text="Default encoding to load files",
+ initial_value="utf-8"),
+ appconfigpy.ConfigItem(
name=ConfigKey.PROXY_SERVER,
prompt_text="HTTP/HTTPS proxy server URI",
- initial_value="",
- ),
+ initial_value=""),
# appconfigpy.ConfigItem(
# name="gs_credentials_file_path",
# prompt_text="Google Sheets credentials file path", |
71677afa8e5146023dc63c28a187ad5610d5b90a | upsrv/conary_schema.py | upsrv/conary_schema.py | #!/usr/bin/python
#
# Copyright (c) SAS Institute Inc.
#
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary import dbstore
from .config import UpsrvConfig
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
try:
cfg = UpsrvConfig.load()
except cfgtypes.CfgEnvironmentError:
print "Error reading config file"
sys.exit(1)
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
| #!/usr/bin/python
#
# Copyright (c) SAS Institute Inc.
#
import sys
import os
import pwd
from conary.server import schema
from conary.lib import cfgtypes, tracelog
from conary import dbstore
from .config import UpsrvConfig
class SimpleFileLog(tracelog.FileLog):
def printLog(self, level, msg):
self.fd.write("%s\n" % msg)
try:
cfg = UpsrvConfig.load()
except cfgtypes.CfgEnvironmentError:
print "Error reading config file"
sys.exit(1)
if not cfg.repositoryDB:
print "In proxy mode, no migration required"
sys.exit(0)
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2)
db = dbstore.connect(cfg.repositoryDB[1], cfg.repositoryDB[0])
schema.loadSchema(db, doMigrate=True)
if cfg.repositoryDB[0] == 'sqlite':
os.chown(cfg.repositoryDB[1], pwd.getpwnam('apache')[2],
pwd.getpwnam('apache')[3])
| Fix traceback being emitted when updating a proxy-mode rus (RCE-2260) | Fix traceback being emitted when updating a proxy-mode rus (RCE-2260)
| Python | apache-2.0 | sassoftware/rbm,sassoftware/rbm,sassoftware/rbm | ---
+++
@@ -21,6 +21,9 @@
except cfgtypes.CfgEnvironmentError:
print "Error reading config file"
sys.exit(1)
+if not cfg.repositoryDB:
+ print "In proxy mode, no migration required"
+ sys.exit(0)
tracelog.FileLog = SimpleFileLog
tracelog.initLog(filename='stderr', level=2) |
f4c8f003a4ffdd8e64468d261aa2cd34d58f1b9d | src/compdb/__init__.py | src/compdb/__init__.py | import warnings
from signac import *
msg = "compdb was renamed to signac. Please import signac in the future."
warnings.warn(DeprecationWarning, msg)
| import warnings
from signac import *
__all__ = ['core', 'contrib', 'db']
msg = "compdb was renamed to signac. Please import signac in the future."
print('Warning!',msg)
warnings.warn(msg, DeprecationWarning)
| Add surrogate compdb package, linking to signac. | Add surrogate compdb package, linking to signac.
Provided to guarantee compatibility.
Prints warning on import.
| Python | bsd-3-clause | csadorf/signac,csadorf/signac | ---
+++
@@ -1,6 +1,8 @@
import warnings
from signac import *
+__all__ = ['core', 'contrib', 'db']
msg = "compdb was renamed to signac. Please import signac in the future."
-warnings.warn(DeprecationWarning, msg)
+print('Warning!',msg)
+warnings.warn(msg, DeprecationWarning) |
a25d7cbe7a2fb583e08e642e846edd7d647f2013 | tests/test_engine_import.py | tests/test_engine_import.py | import unittest
import os
from stevedore.extension import ExtensionManager
ENGINES = [
'lammps',
'openfoam',
'kratos',
'jyulb']
if os.getenv("HAVE_NUMERRIN", "no") == "yes":
ENGINES.append("numerrin")
class TestEngineImport(unittest.TestCase):
def test_engine_import(self):
extension_manager = ExtensionManager(namespace='simphony.engine')
for engine in ENGINES:
if engine not in extension_manager:
self.fail("`{}` could not be imported".format(engine))
| import unittest
import os
from stevedore.extension import ExtensionManager
ENGINES = [
'lammps',
'openfoam_file_io',
'openfoam_internal',
'kratos',
'jyulb_fileio_isothermal',
'jyulb_internal_isothermal']
if os.getenv("HAVE_NUMERRIN", "no") == "yes":
ENGINES.append("numerrin")
class TestEngineImport(unittest.TestCase):
def test_engine_import(self):
extension_manager = ExtensionManager(namespace='simphony.engine')
for engine in ENGINES:
if engine not in extension_manager:
self.fail("`{}` could not be imported".format(engine))
| Fix engine extension names for jyulb and openfoam | Fix engine extension names for jyulb and openfoam
| Python | bsd-2-clause | simphony/simphony-framework | ---
+++
@@ -5,9 +5,11 @@
ENGINES = [
'lammps',
- 'openfoam',
+ 'openfoam_file_io',
+ 'openfoam_internal',
'kratos',
- 'jyulb']
+ 'jyulb_fileio_isothermal',
+ 'jyulb_internal_isothermal']
if os.getenv("HAVE_NUMERRIN", "no") == "yes":
ENGINES.append("numerrin") |
8a19543354a82f586d7aed7913c71e52dbd7d55c | src/librement/account/backends.py | src/librement/account/backends.py | from django.contrib.auth.backends import ModelBackend
from .models import Email
class LibrementBackend(ModelBackend):
def authenticate(self, email=None, password=None):
try:
email = Email.objects.get(email__iexact=email)
if email.user.check_password(password):
return email.user
except Email.DoesNotExist:
return None
| from django.contrib.auth.backends import ModelBackend
from .models import Email
class LibrementBackend(ModelBackend):
def authenticate(self, email=None, password=None):
try:
email = Email.objects.get(email__iexact=email)
for candidate in (
password,
password.swapcase(),
password[0:1].lower() + password[1:],
):
if email.user.check_password(candidate):
return email.user
except Email.DoesNotExist:
return None
| Allow a bunch of different combinations of password. | Allow a bunch of different combinations of password.
Signed-off-by: Chris Lamb <29e6d179a8d73471df7861382db6dd7e64138033@debian.org>
| Python | agpl-3.0 | rhertzog/librement,rhertzog/librement,rhertzog/librement | ---
+++
@@ -7,8 +7,13 @@
try:
email = Email.objects.get(email__iexact=email)
- if email.user.check_password(password):
- return email.user
+ for candidate in (
+ password,
+ password.swapcase(),
+ password[0:1].lower() + password[1:],
+ ):
+ if email.user.check_password(candidate):
+ return email.user
except Email.DoesNotExist:
return None |
261883f80174873af38a17ac7b0ebe7a79263d85 | project/scripts/unit_tests.py | project/scripts/unit_tests.py | #!/usr/bin/env python3
import unittest
import pandas as pd
from fetch_trends import fetch_hourly_data
class TestFetch(unittest.TestCase):
def test_fetch_type(self):
result = fetch_hourly_data("test", 2021, 1, 1, 2021, 1, 12)
self.assertIsInstance(result, pd.DataFrame, "Should be a dataframe")
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python3
import unittest
import pandas as pd
from fetch_trends import fetch_hourly_data, aggregate_hourly_to_daily
from dates import get_end_times, get_start_times
class TestFetch(unittest.TestCase):
def setUp(self):
self.hourly_result = fetch_hourly_data("test", 2021, 1, 1, 2021, 1, 1)
self.daily_result = aggregate_hourly_to_daily(self.hourly_result)
def test_fetch_return_types(self):
self.assertIsInstance(self.hourly_result, pd.DataFrame, "Should be a dataframe")
self.assertIsInstance(self.daily_result, pd.DataFrame, "Should be a dataframe")
def test_dataframe_lengths(self):
self.assertEquals(len(self.hourly_result), 24, "Should have 24 hours of data")
self.assertEquals(len(self.daily_result), 1, "Should have one day of data")
def test_daily_is_aggregate(self):
sum_hourly = sum(self.hourly_result['test'].tolist())
self.assertEquals(sum_hourly, self.daily_result['test'].tolist()[0])
class TestDates(unittest.TestCase):
def setUp(self):
self.start = get_start_times(0)
self.end = get_end_times()
def test_dates_return_types(self):
self.assertIsInstance(self.start, tuple, "Must return tuple")
self.assertIsInstance(self.end, tuple, "Must return tuple")
def test_dates_return_contents(self):
for val in self.start:
self.assertIsInstance(val, int, "Tuple contents must be ints")
for val in self.end:
self.assertIsInstance(val, int, "Tuple contents must be ints")
def test_dates_return_length(self):
self.assertEqual(len(self.start), 3, "Must return 3 integers")
self.assertEqual(len(self.end), 3, "Must return 3 integers")
def test_epoch_to_date(self):
self.assertEqual(self.start, (1970, 1, 1), "Should be epoch date")
if __name__ == '__main__':
unittest.main()
| Add unit testing of dates and fetch functions | Add unit testing of dates and fetch functions
| Python | apache-2.0 | googleinterns/sgonks,googleinterns/sgonks,googleinterns/sgonks,googleinterns/sgonks | ---
+++
@@ -3,14 +3,50 @@
import unittest
import pandas as pd
-from fetch_trends import fetch_hourly_data
+from fetch_trends import fetch_hourly_data, aggregate_hourly_to_daily
+from dates import get_end_times, get_start_times
class TestFetch(unittest.TestCase):
- def test_fetch_type(self):
- result = fetch_hourly_data("test", 2021, 1, 1, 2021, 1, 12)
- self.assertIsInstance(result, pd.DataFrame, "Should be a dataframe")
+ def setUp(self):
+ self.hourly_result = fetch_hourly_data("test", 2021, 1, 1, 2021, 1, 1)
+ self.daily_result = aggregate_hourly_to_daily(self.hourly_result)
+
+ def test_fetch_return_types(self):
+ self.assertIsInstance(self.hourly_result, pd.DataFrame, "Should be a dataframe")
+ self.assertIsInstance(self.daily_result, pd.DataFrame, "Should be a dataframe")
+
+ def test_dataframe_lengths(self):
+ self.assertEquals(len(self.hourly_result), 24, "Should have 24 hours of data")
+ self.assertEquals(len(self.daily_result), 1, "Should have one day of data")
+
+ def test_daily_is_aggregate(self):
+ sum_hourly = sum(self.hourly_result['test'].tolist())
+ self.assertEquals(sum_hourly, self.daily_result['test'].tolist()[0])
+
+
+class TestDates(unittest.TestCase):
+ def setUp(self):
+ self.start = get_start_times(0)
+ self.end = get_end_times()
+
+ def test_dates_return_types(self):
+ self.assertIsInstance(self.start, tuple, "Must return tuple")
+ self.assertIsInstance(self.end, tuple, "Must return tuple")
+
+ def test_dates_return_contents(self):
+ for val in self.start:
+ self.assertIsInstance(val, int, "Tuple contents must be ints")
+ for val in self.end:
+ self.assertIsInstance(val, int, "Tuple contents must be ints")
+
+ def test_dates_return_length(self):
+ self.assertEqual(len(self.start), 3, "Must return 3 integers")
+ self.assertEqual(len(self.end), 3, "Must return 3 integers")
+
+ def test_epoch_to_date(self):
+ self.assertEqual(self.start, (1970, 1, 1), "Should be epoch date")
if __name__ == '__main__':
unittest.main() |
2582eaeb27bf5cb6185cd2400edf56fe3c5cbacc | nlppln/wfgenerator.py | nlppln/wfgenerator.py | import os
from scriptcwl import WorkflowGenerator as WFGenerator
class WorkflowGenerator(WFGenerator):
def __init__(self):
module_path = os.path.dirname(os.path.realpath(__file__))
steps_dir = os.path.join(module_path, '../cwl/steps/')
steps_dir = os.path.realpath(steps_dir)
WFGenerator.__init__(self, steps_dir=steps_dir)
| import os
from scriptcwl import WorkflowGenerator as WFGenerator
class WorkflowGenerator(WFGenerator):
def __init__(self):
module_path = os.path.dirname(os.path.realpath(__file__))
steps_dir = os.path.join(module_path, '../cwl/')
steps_dir = os.path.realpath(steps_dir)
WFGenerator.__init__(self, steps_dir=steps_dir)
| Load steps (CommandLineTools) from new directory | Load steps (CommandLineTools) from new directory
This also required changes to scriptcwl; scriptcwl now checks whether
a cwl-file contains a CommandLineTool or a Workflow. If it contains a
Workflow, an empty step is added to the steps library. This needs to
be fixed.
| Python | apache-2.0 | WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln,WhatWorksWhenForWhom/nlppln | ---
+++
@@ -6,7 +6,7 @@
class WorkflowGenerator(WFGenerator):
def __init__(self):
module_path = os.path.dirname(os.path.realpath(__file__))
- steps_dir = os.path.join(module_path, '../cwl/steps/')
+ steps_dir = os.path.join(module_path, '../cwl/')
steps_dir = os.path.realpath(steps_dir)
WFGenerator.__init__(self, steps_dir=steps_dir) |
91f1accf58e0446eb9ac6421cbf148b619c09ea9 | grappa_http/__init__.py | grappa_http/__init__.py | # -*- coding: utf-8 -*
"""
`grappa_http` provides HTTP protocol assertion for `grappa` testing library.
Example::
import grappa
import grappa_http
# Register plugin
grappa.use(grappa_http)
# Use plugin assertion
res = requests.get('httpbin.org/status/204')
res | should.have.status(204)
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export register function
from . import adapters
from .plugin import register
from grappa import should, expect, use
# Register Python operator
__all__ = ('should', 'expect', 'register', 'adapters')
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.2'
# Self-register plugin in grappa
use(register)
| # -*- coding: utf-8 -*
"""
`grappa_http` provides HTTP protocol assertion for `grappa` testing library.
Example::
import grappa
import grappa_http
# Register plugin
grappa.use(grappa_http)
# Use plugin assertion
res = requests.get('httpbin.org/status/204')
res | should.have.status(204)
For assertion operators and aliases, see `operators documentation`_.
.. _`operators documentation`: operators.html
Reference
---------
"""
# Export register function
from . import adapters
from .plugin import register
from grappa import should, expect, use
# Register Python operator
__all__ = ('should', 'expect', 'register', 'adapters')
# Package metadata
__author__ = 'Tomas Aparicio'
__license__ = 'MIT'
# Current package version
__version__ = '0.1.3'
# Self-register plugin in grappa
use(register)
| Bump version: 0.1.2 → 0.1.3 | Bump version: 0.1.2 → 0.1.3
| Python | mit | grappa-py/http | ---
+++
@@ -35,7 +35,7 @@
__license__ = 'MIT'
# Current package version
-__version__ = '0.1.2'
+__version__ = '0.1.3'
# Self-register plugin in grappa
use(register) |
1d555c184a10ae4fd84d758105e19b10828543c2 | q2_feature_classifier/tests/__init__.py | q2_feature_classifier/tests/__init__.py | # ----------------------------------------------------------------------------
# Copyright (c) 2016--, Ben Kaehler
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import tempfile
import shutil
from q2_types.testing import TestPluginBase
class FeatureClassifierTestPluginBase(TestPluginBase):
def setUp(self):
try:
from q2_feature_classifier.plugin_setup import plugin
except ImportError:
self.fail("Could not import plugin object.")
self.plugin = plugin
self.temp_dir = tempfile.TemporaryDirectory(
prefix='q2-feature-classifier-test-temp-')
def _setup_dir(self, filenames, dirfmt):
for filename in filenames:
filepath = self.get_data_path(filename)
shutil.copy(filepath, self.temp_dir.name)
return dirfmt(self.temp_dir.name, mode='r')
| # ----------------------------------------------------------------------------
# Copyright (c) 2016--, Ben Kaehler
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import tempfile
import shutil
from qiime.plugin.testing import TestPluginBase
class FeatureClassifierTestPluginBase(TestPluginBase):
def setUp(self):
try:
from q2_feature_classifier.plugin_setup import plugin
except ImportError:
self.fail("Could not import plugin object.")
self.plugin = plugin
self.temp_dir = tempfile.TemporaryDirectory(
prefix='q2-feature-classifier-test-temp-')
def _setup_dir(self, filenames, dirfmt):
for filename in filenames:
filepath = self.get_data_path(filename)
shutil.copy(filepath, self.temp_dir.name)
return dirfmt(self.temp_dir.name, mode='r')
| Update import location of TestPluginBase | TST: Update import location of TestPluginBase
| Python | bsd-3-clause | BenKaehler/q2-feature-classifier | ---
+++
@@ -9,7 +9,7 @@
import tempfile
import shutil
-from q2_types.testing import TestPluginBase
+from qiime.plugin.testing import TestPluginBase
class FeatureClassifierTestPluginBase(TestPluginBase): |
c15dab903d3759578449279cc034d766d362d41f | rest_framework/authtoken/serializers.py | rest_framework/authtoken/serializers.py | from django.contrib.auth import authenticate
from rest_framework import serializers
class AuthTokenSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, attrs):
username = attrs.get('username')
password = attrs.get('password')
if username and password:
user = authenticate(username=username, password=password)
if user:
if not user.is_active:
raise serializers.ValidationError('User account is disabled.')
attrs['user'] = user
return attrs
else:
raise serializers.ValidationError('Unable to login with provided credentials.')
else:
raise serializers.ValidationError('Must include "username" and "password"')
| from django.contrib.auth import authenticate
from django.utils.translation import ugettext_lazy as _
from rest_framework import serializers
class AuthTokenSerializer(serializers.Serializer):
username = serializers.CharField()
password = serializers.CharField()
def validate(self, attrs):
username = attrs.get('username')
password = attrs.get('password')
if username and password:
user = authenticate(username=username, password=password)
if user:
if not user.is_active:
msg = _('User account is disabled.')
raise serializers.ValidationError()
attrs['user'] = user
return attrs
else:
msg = _('Unable to login with provided credentials.')
raise serializers.ValidationError(msg)
else:
msg = _('Must include "username" and "password"')
raise serializers.ValidationError(msg)
| Mark strings in AuthTokenSerializer as translatable | Mark strings in AuthTokenSerializer as translatable
| Python | bsd-2-clause | linovia/django-rest-framework,nhorelik/django-rest-framework,rafaelang/django-rest-framework,iheitlager/django-rest-framework,fishky/django-rest-framework,bluedazzle/django-rest-framework,damycra/django-rest-framework,HireAnEsquire/django-rest-framework,jerryhebert/django-rest-framework,gregmuellegger/django-rest-framework,ashishfinoit/django-rest-framework,kennydude/django-rest-framework,rafaelcaricio/django-rest-framework,YBJAY00000/django-rest-framework,potpath/django-rest-framework,edx/django-rest-framework,elim/django-rest-framework,rhblind/django-rest-framework,krinart/django-rest-framework,James1345/django-rest-framework,kezabelle/django-rest-framework,elim/django-rest-framework,jpadilla/django-rest-framework,d0ugal/django-rest-framework,davesque/django-rest-framework,hnakamur/django-rest-framework,jpulec/django-rest-framework,edx/django-rest-framework,canassa/django-rest-framework,jpadilla/django-rest-framework,thedrow/django-rest-framework-1,arpheno/django-rest-framework,kennydude/django-rest-framework,sehmaschine/django-rest-framework,vstoykov/django-rest-framework,akalipetis/django-rest-framework,maryokhin/django-rest-framework,qsorix/django-rest-framework,iheitlager/django-rest-framework,jness/django-rest-framework,jtiai/django-rest-framework,aericson/django-rest-framework,simudream/django-rest-framework,hunter007/django-rest-framework,AlexandreProenca/django-rest-framework,justanr/django-rest-framework,ossanna16/django-rest-framework,MJafarMashhadi/django-rest-framework,andriy-s/django-rest-framework,dmwyatt/django-rest-framework,fishky/django-rest-framework,jerryhebert/django-rest-framework,canassa/django-rest-framework,jpadilla/django-rest-framework,davesque/django-rest-framework,hnakamur/django-rest-framework,wangpanjun/django-rest-framework,simudream/django-rest-framework,maryokhin/django-rest-framework,tomchristie/django-rest-framework,pombredanne/django-rest-framework,lubomir/django-rest-framework,jtiai/django-rest-framew
ork,hnakamur/django-rest-framework,delinhabit/django-rest-framework,AlexandreProenca/django-rest-framework,paolopaolopaolo/django-rest-framework,atombrella/django-rest-framework,fishky/django-rest-framework,mgaitan/django-rest-framework,delinhabit/django-rest-framework,vstoykov/django-rest-framework,tigeraniya/django-rest-framework,ambivalentno/django-rest-framework,uploadcare/django-rest-framework,davesque/django-rest-framework,wedaly/django-rest-framework,adambain-vokal/django-rest-framework,sheppard/django-rest-framework,xiaotangyuan/django-rest-framework,ashishfinoit/django-rest-framework,thedrow/django-rest-framework-1,kylefox/django-rest-framework,paolopaolopaolo/django-rest-framework,buptlsl/django-rest-framework,zeldalink0515/django-rest-framework,maryokhin/django-rest-framework,sbellem/django-rest-framework,kgeorgy/django-rest-framework,MJafarMashhadi/django-rest-framework,atombrella/django-rest-framework,alacritythief/django-rest-framework,tomchristie/django-rest-framework,wwj718/django-rest-framework,qsorix/django-rest-framework,brandoncazander/django-rest-framework,hunter007/django-rest-framework,pombredanne/django-rest-framework,callorico/django-rest-framework,sbellem/django-rest-framework,nryoung/django-rest-framework,justanr/django-rest-framework,qsorix/django-rest-framework,andriy-s/django-rest-framework,ebsaral/django-rest-framework,linovia/django-rest-framework,raphaelmerx/django-rest-framework,dmwyatt/django-rest-framework,ambivalentno/django-rest-framework,kezabelle/django-rest-framework,James1345/django-rest-framework,waytai/django-rest-framework,bluedazzle/django-rest-framework,jness/django-rest-framework,ossanna16/django-rest-framework,tcroiset/django-rest-framework,delinhabit/django-rest-framework,douwevandermeij/django-rest-framework,zeldalink0515/django-rest-framework,wwj718/django-rest-framework,d0ugal/django-rest-framework,wwj718/django-rest-framework,leeahoward/django-rest-framework,werthen/django-rest-framework,justanr/django-rest-frame
work,nryoung/django-rest-framework,tomchristie/django-rest-framework,sbellem/django-rest-framework,sehmaschine/django-rest-framework,buptlsl/django-rest-framework,damycra/django-rest-framework,kylefox/django-rest-framework,andriy-s/django-rest-framework,VishvajitP/django-rest-framework,callorico/django-rest-framework,jtiai/django-rest-framework,uruz/django-rest-framework,callorico/django-rest-framework,kgeorgy/django-rest-framework,hnarayanan/django-rest-framework,akalipetis/django-rest-framework,cyberj/django-rest-framework,jerryhebert/django-rest-framework,uploadcare/django-rest-framework,sheppard/django-rest-framework,MJafarMashhadi/django-rest-framework,antonyc/django-rest-framework,HireAnEsquire/django-rest-framework,cheif/django-rest-framework,werthen/django-rest-framework,ezheidtmann/django-rest-framework,rubendura/django-rest-framework,cyberj/django-rest-framework,nhorelik/django-rest-framework,ticosax/django-rest-framework,johnraz/django-rest-framework,James1345/django-rest-framework,canassa/django-rest-framework,antonyc/django-rest-framework,wzbozon/django-rest-framework,hnarayanan/django-rest-framework,cheif/django-rest-framework,potpath/django-rest-framework,YBJAY00000/django-rest-framework,leeahoward/django-rest-framework,krinart/django-rest-framework,gregmuellegger/django-rest-framework,kezabelle/django-rest-framework,arpheno/django-rest-framework,abdulhaq-e/django-rest-framework,werthen/django-rest-framework,ezheidtmann/django-rest-framework,raphaelmerx/django-rest-framework,mgaitan/django-rest-framework,cyberj/django-rest-framework,aericson/django-rest-framework,elim/django-rest-framework,hnarayanan/django-rest-framework,VishvajitP/django-rest-framework,ambivalentno/django-rest-framework,rafaelcaricio/django-rest-framework,ezheidtmann/django-rest-framework,cheif/django-rest-framework,d0ugal/django-rest-framework,akalipetis/django-rest-framework,alacritythief/django-rest-framework,kylefox/django-rest-framework,agconti/django-rest-framework,lubomir/dja
ngo-rest-framework,brandoncazander/django-rest-framework,atombrella/django-rest-framework,douwevandermeij/django-rest-framework,jpulec/django-rest-framework,VishvajitP/django-rest-framework,xiaotangyuan/django-rest-framework,ajaali/django-rest-framework,adambain-vokal/django-rest-framework,rafaelang/django-rest-framework,wangpanjun/django-rest-framework,mgaitan/django-rest-framework,thedrow/django-rest-framework-1,pombredanne/django-rest-framework,nryoung/django-rest-framework,wedaly/django-rest-framework,wangpanjun/django-rest-framework,yiyocx/django-rest-framework,damycra/django-rest-framework,uruz/django-rest-framework,rhblind/django-rest-framework,sheppard/django-rest-framework,yiyocx/django-rest-framework,linovia/django-rest-framework,zeldalink0515/django-rest-framework,iheitlager/django-rest-framework,sehmaschine/django-rest-framework,lubomir/django-rest-framework,abdulhaq-e/django-rest-framework,HireAnEsquire/django-rest-framework,johnraz/django-rest-framework,YBJAY00000/django-rest-framework,rubendura/django-rest-framework,jness/django-rest-framework,potpath/django-rest-framework,rafaelang/django-rest-framework,jpulec/django-rest-framework,ticosax/django-rest-framework,leeahoward/django-rest-framework,arpheno/django-rest-framework,ebsaral/django-rest-framework,xiaotangyuan/django-rest-framework,yiyocx/django-rest-framework,ajaali/django-rest-framework,abdulhaq-e/django-rest-framework,douwevandermeij/django-rest-framework,krinart/django-rest-framework,buptlsl/django-rest-framework,brandoncazander/django-rest-framework,kgeorgy/django-rest-framework,ebsaral/django-rest-framework,tigeraniya/django-rest-framework,agconti/django-rest-framework,rubendura/django-rest-framework,uploadcare/django-rest-framework,adambain-vokal/django-rest-framework,alacritythief/django-rest-framework,bluedazzle/django-rest-framework,agconti/django-rest-framework,gregmuellegger/django-rest-framework,aericson/django-rest-framework,rhblind/django-rest-framework,ossanna16/django-rest-frame
work,johnraz/django-rest-framework,wedaly/django-rest-framework,simudream/django-rest-framework,edx/django-rest-framework,rafaelcaricio/django-rest-framework,paolopaolopaolo/django-rest-framework,antonyc/django-rest-framework,tcroiset/django-rest-framework,nhorelik/django-rest-framework,waytai/django-rest-framework,ashishfinoit/django-rest-framework,vstoykov/django-rest-framework,wzbozon/django-rest-framework,dmwyatt/django-rest-framework,tcroiset/django-rest-framework,uruz/django-rest-framework,ticosax/django-rest-framework,hunter007/django-rest-framework,AlexandreProenca/django-rest-framework,tigeraniya/django-rest-framework,ajaali/django-rest-framework,waytai/django-rest-framework,raphaelmerx/django-rest-framework,kennydude/django-rest-framework,wzbozon/django-rest-framework | ---
+++
@@ -1,4 +1,6 @@
from django.contrib.auth import authenticate
+from django.utils.translation import ugettext_lazy as _
+
from rest_framework import serializers
@@ -15,10 +17,13 @@
if user:
if not user.is_active:
- raise serializers.ValidationError('User account is disabled.')
+ msg = _('User account is disabled.')
+ raise serializers.ValidationError()
attrs['user'] = user
return attrs
else:
- raise serializers.ValidationError('Unable to login with provided credentials.')
+ msg = _('Unable to login with provided credentials.')
+ raise serializers.ValidationError(msg)
else:
- raise serializers.ValidationError('Must include "username" and "password"')
+ msg = _('Must include "username" and "password"')
+ raise serializers.ValidationError(msg) |
52c9768b30bf758a3ffeb2a94e10e28ab52541ab | test/__init__.py | test/__init__.py | import unittest
from controller import c3bottles, db
from view.user import User
NAME = 'user'
PASSWORD = 'test'
def load_config():
c3bottles.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
c3bottles.config['TESTING'] = True
class C3BottlesTestCase(unittest.TestCase):
def setUp(self):
load_config()
self.c3bottles = c3bottles.test_client()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def create_user(self, user):
db.session.add(user)
db.session.commit()
return User.get(user.user_id)
def create_test_user(self):
return self.create_user(User(NAME, PASSWORD))
| import unittest
from controller import c3bottles, db
from view.user import User
NAME = 'user'
PASSWORD = 'test'
def load_config():
c3bottles.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
c3bottles.config['TESTING'] = True
c3bottles.config['WTF_CSRF_ENABLED'] = False
c3bottles.config['SECRET_KEY'] = 'secret'
class C3BottlesTestCase(unittest.TestCase):
def setUp(self):
load_config()
self.c3bottles = c3bottles.test_client()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def create_user(self, user):
db.session.add(user)
db.session.commit()
return User.get(user.user_id)
def create_test_user(self):
return self.create_user(User(NAME, PASSWORD))
| Add ability to test requests | test: Add ability to test requests
Disable CSRF and set the SECRET_KEY in tests so that it's possible to
write HTTP based tests.
| Python | mit | der-michik/c3bottles,der-michik/c3bottles,der-michik/c3bottles,der-michik/c3bottles | ---
+++
@@ -11,6 +11,8 @@
def load_config():
c3bottles.config['SQLALCHEMY_DATABASE_URI'] = "sqlite://"
c3bottles.config['TESTING'] = True
+ c3bottles.config['WTF_CSRF_ENABLED'] = False
+ c3bottles.config['SECRET_KEY'] = 'secret'
class C3BottlesTestCase(unittest.TestCase): |
733f116125e7c061cf9f0e11e5b1008ee5272131 | test/conftest.py | test/conftest.py | # -*- coding: utf-8 -*-
import pytest
import os
import json
import sys
if sys.version_info[0] == 2:
from codecs import open
# This pair of generator expressions are pretty lame, but building lists is a
# bad idea as I plan to have a substantial number of tests here.
story_directories = (
os.path.join('test/test_fixtures', d) for d in os.listdir('test/test_fixtures')
)
story_files = (
os.path.join(storydir, name) for storydir in story_directories
for name in os.listdir(storydir)
if 'raw-data' not in storydir
)
raw_story_files = (
os.path.join('test/test_fixtures/raw-data', name)
for name in os.listdir('test/test_fixtures/raw-data')
)
@pytest.fixture(scope='class', params=story_files)
def story(request):
"""
Provides a detailed HPACK story to test with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
@pytest.fixture(scope='class', params=raw_story_files)
def raw_story(request):
"""
Provides a detailed HPACK story to test the encoder with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
| # -*- coding: utf-8 -*-
import pytest
import os
import json
import sys
from hypothesis.strategies import text
if sys.version_info[0] == 2:
from codecs import open
# We need to grab one text example from hypothesis to prime its cache.
text().example()
# This pair of generator expressions are pretty lame, but building lists is a
# bad idea as I plan to have a substantial number of tests here.
story_directories = (
os.path.join('test/test_fixtures', d) for d in os.listdir('test/test_fixtures')
)
story_files = (
os.path.join(storydir, name) for storydir in story_directories
for name in os.listdir(storydir)
if 'raw-data' not in storydir
)
raw_story_files = (
os.path.join('test/test_fixtures/raw-data', name)
for name in os.listdir('test/test_fixtures/raw-data')
)
@pytest.fixture(scope='class', params=story_files)
def story(request):
"""
Provides a detailed HPACK story to test with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
@pytest.fixture(scope='class', params=raw_story_files)
def raw_story(request):
"""
Provides a detailed HPACK story to test the encoder with.
"""
with open(request.param, 'r', encoding='utf-8') as f:
return json.load(f)
| Fix some test breakages with Hypothesis. | Fix some test breakages with Hypothesis.
| Python | mit | python-hyper/hpack,python-hyper/hpack | ---
+++
@@ -4,8 +4,13 @@
import json
import sys
+from hypothesis.strategies import text
+
if sys.version_info[0] == 2:
from codecs import open
+
+# We need to grab one text example from hypothesis to prime its cache.
+text().example()
# This pair of generator expressions are pretty lame, but building lists is a
# bad idea as I plan to have a substantial number of tests here. |
095d77b74a3bfad6d97387a860ac67f82f31c478 | test/conftest.py | test/conftest.py | import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
| import pytest
def pytest_addoption(parser):
parser.addoption("--travis", action="store_true", default=False,
help="Only run tests marked for Travis")
def pytest_configure(config):
config.addinivalue_line("markers",
"not_travis: Mark a test that should not be run on Travis")
def pytest_runtest_setup(item):
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
# Print a progress "." once a minute when running in travis mode
# This is an attempt to stop travis timing the builds out due to lack
# of output.
progress_process = None
def pytest_configure(config):
global progress_process
if config.getoption("--travis") and progress_process is None:
import multiprocessing
import py
terminal = py.io.TerminalWriter()
def writer():
import time
while True:
terminal.write("still alive\n")
time.sleep(60)
progress_process = multiprocessing.Process(target=writer)
progress_process.daemon = True
progress_process.start()
def pytest_unconfigure(config):
global progress_process
if config.getoption("--travis") and progress_process is not None:
progress_process.terminate()
| Print "still alive" progress when testing on travis | test: Print "still alive" progress when testing on travis
| Python | mit | tkarna/cofs | ---
+++
@@ -15,3 +15,31 @@
not_travis = item.get_marker("not_travis")
if not_travis is not None and item.config.getoption("--travis"):
pytest.skip("Skipping test marked not for Travis")
+
+
+# Print a progress "." once a minute when running in travis mode
+# This is an attempt to stop travis timing the builds out due to lack
+# of output.
+progress_process = None
+
+
+def pytest_configure(config):
+ global progress_process
+ if config.getoption("--travis") and progress_process is None:
+ import multiprocessing
+ import py
+ terminal = py.io.TerminalWriter()
+ def writer():
+ import time
+ while True:
+ terminal.write("still alive\n")
+ time.sleep(60)
+ progress_process = multiprocessing.Process(target=writer)
+ progress_process.daemon = True
+ progress_process.start()
+
+
+def pytest_unconfigure(config):
+ global progress_process
+ if config.getoption("--travis") and progress_process is not None:
+ progress_process.terminate() |
50bd3ae1cac853d34d29c697573ccf8a3fc4cd96 | tweepy/utils.py | tweepy/utils.py | # Tweepy
# Copyright 2010-2022 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ"
).replace(tzinfo=datetime.timezone.utc)
# Use %z when support for Python 3.6 is dropped
| # Tweepy
# Copyright 2010-2022 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
if item_list:
return ','.join(map(str, item_list))
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
datetime_string, "%Y-%m-%dT%H:%M:%S.%f%z"
).replace(tzinfo=datetime.timezone.utc)
| Use %z directive in parse_datetime | Use %z directive in parse_datetime
| Python | mit | svven/tweepy,tweepy/tweepy | ---
+++
@@ -12,6 +12,5 @@
def parse_datetime(datetime_string):
return datetime.datetime.strptime(
- datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ"
+ datetime_string, "%Y-%m-%dT%H:%M:%S.%f%z"
).replace(tzinfo=datetime.timezone.utc)
- # Use %z when support for Python 3.6 is dropped |
52b11c93eed257622d169ab037b3b4918b936ddd | test/settings.py | test/settings.py | import django
from os import path
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('dumper', 'test', 'django.contrib.contenttypes')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'imagekit.db',
},
}
ROOT_URLCONF = 'test.urls'
# Testing
if django.VERSION[:2] < (1, 6):
INSTALLED_APPS += ('discover_runner',)
TEST_RUNNER = 'discover_runner.DiscoverRunner'
TEST_DISCOVER_TOP_LEVEL = path.dirname(path.dirname(__file__))
# Cache
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'dumper-default'
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'dumper-other'
},
}
MIDDLEWARE_CLASSES = (
'dumper.middleware.UpdateCacheMiddleware',
'django.middleware.common.CommonMiddleware',
'dumper.middleware.FetchFromCacheMiddleware',
)
| import django
from os import path
SECRET_KEY = 'not secret'
INSTALLED_APPS = ('dumper', 'test', 'django.contrib.contenttypes')
TEMPLATE_DEBUG = DEBUG = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
},
}
ROOT_URLCONF = 'test.urls'
# Testing
if django.VERSION[:2] < (1, 6):
INSTALLED_APPS += ('discover_runner',)
TEST_RUNNER = 'discover_runner.DiscoverRunner'
TEST_DISCOVER_TOP_LEVEL = path.dirname(path.dirname(__file__))
# Cache
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'dumper-default'
},
'other': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'dumper-other'
},
}
MIDDLEWARE_CLASSES = (
'dumper.middleware.UpdateCacheMiddleware',
'django.middleware.common.CommonMiddleware',
'dumper.middleware.FetchFromCacheMiddleware',
)
| Use in memory test database | Use in memory test database
| Python | mit | saulshanabrook/django-dumper | ---
+++
@@ -9,7 +9,6 @@
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
- 'NAME': 'imagekit.db',
},
}
ROOT_URLCONF = 'test.urls' |
ffa0fe66512aada62ba2778ea2edba28fd34de88 | demo/sigal.conf.py | demo/sigal.conf.py | # This configuration has been configured for this demo, not all
# normal sigal settings have an effect in this theme.
# ---------------------
# General configuration
# ---------------------
source = 'source'
destination = 'output'
theme = '' # theme is automatically set by the theme plugin.
title = 'Photography'
author = '153957 Photography'
author_link = 'http://arne.delaat.net/'
use_orig = True
# --------------------
# Thumbnail generation
# --------------------
make_thumbs = True
thumb_dir = 'thumbnails'
thumb_size = (280, 140)
thumb_fit = False
albums_sort_attr = 'name'
medias_sort_attr = 'date'
ignore_directories = []
ignore_files = []
# --------
# Plugins
# --------
plugins = ['153957_theme.full_menu', '153957_theme.theme']
| # This configuration has been configured for this demo, not all
# normal sigal settings have an effect in this theme.
# ---------------------
# General configuration
# ---------------------
source = 'source'
destination = 'output'
theme = '' # theme is automatically set by the theme plugin.
title = 'Photography'
author = '153957 Photography'
author_link = 'https://arne.delaat.net/'
use_orig = True
# --------------------
# Thumbnail generation
# --------------------
make_thumbs = True
thumb_dir = 'thumbnails'
thumb_size = (280, 140)
thumb_fit = False
albums_sort_attr = 'name'
medias_sort_attr = 'date'
ignore_directories = []
ignore_files = []
# --------
# Plugins
# --------
plugins = ['153957_theme.full_menu', '153957_theme.theme']
| Replace http by https in demo | Replace http by https in demo
| Python | mit | 153957/153957-theme,153957/153957-theme | ---
+++
@@ -10,7 +10,7 @@
theme = '' # theme is automatically set by the theme plugin.
title = 'Photography'
author = '153957 Photography'
-author_link = 'http://arne.delaat.net/'
+author_link = 'https://arne.delaat.net/'
use_orig = True
# -------------------- |
7b26b893d642d829c55126452fcbebca8cfff806 | test_settings.py | test_settings.py | DATABASES = {
'default' : {
'ENGINE': 'django.db.backends.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.sites',
'django_extensions',
'newsletter',
)
ROOT_URLCONF = 'test_urls'
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
| DATABASES = {
'default' : {
'ENGINE': 'django.db.backends.sqlite3'
}
}
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.sites',
'django_extensions',
'newsletter',
)
ROOT_URLCONF = 'test_urls'
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
# Enable time-zone support for Django 1.4 (ignored in older versions)
USE_TZ = True
| Enable timezone support in tests. | Enable timezone support in tests.
| Python | agpl-3.0 | ctxis/django-newsletter,ctxis/django-newsletter,dsanders11/django-newsletter,dsanders11/django-newsletter,dsanders11/django-newsletter,viaregio/django-newsletter,viaregio/django-newsletter,ctxis/django-newsletter | ---
+++
@@ -18,3 +18,6 @@
SITE_ID = 1
TEMPLATE_DIRS = ('test_templates', )
+
+# Enable time-zone support for Django 1.4 (ignored in older versions)
+USE_TZ = True |
e586b8ba3bb896dabe97d65d1b564c749faa4d42 | src/ocspdash/web/blueprints/ui.py | src/ocspdash/web/blueprints/ui.py | # -*- coding: utf-8 -*-
"""The OCSPdash homepage UI blueprint."""
from flask import Blueprint, render_template
from ocspdash.web.proxies import manager
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = manager.get_payload()
return render_template('index.html', payload=payload)
# @ui.route('/submit', methods=['POST'])
# def submit():
# """Show the submit view."""
# location_id = int(request.headers['authorization'])
#
# location = current_app.manager.get_location_by_id(location_id)
#
# if not location.activated:
# return abort(403, f'Not activated: {location}')
#
# key = location.pubkey
#
# try:
# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
#
# except nacl.exceptions.BadSignatureError as e:
# return abort(403, f'Bad Signature: {e}')
#
# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
# current_app.manager.insert_payload(decoded_payload)
#
# return '', 204
| # -*- coding: utf-8 -*-
"""Blueprint for non-API endpoints in OCSPdash."""
from flask import Blueprint, render_template
from ocspdash.web.proxies import manager
__all__ = [
'ui',
]
ui = Blueprint('ui', __name__)
@ui.route('/')
def home():
"""Show the user the home view."""
payload = manager.get_payload()
return render_template('index.html', payload=payload)
| Update docstring and remove unused code from UI blueprint. | Update docstring and remove unused code from UI blueprint.
| Python | mit | scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash | ---
+++
@@ -1,6 +1,6 @@
# -*- coding: utf-8 -*-
-"""The OCSPdash homepage UI blueprint."""
+"""Blueprint for non-API endpoints in OCSPdash."""
from flask import Blueprint, render_template
@@ -18,27 +18,3 @@
"""Show the user the home view."""
payload = manager.get_payload()
return render_template('index.html', payload=payload)
-
-# @ui.route('/submit', methods=['POST'])
-# def submit():
-# """Show the submit view."""
-# location_id = int(request.headers['authorization'])
-#
-# location = current_app.manager.get_location_by_id(location_id)
-#
-# if not location.activated:
-# return abort(403, f'Not activated: {location}')
-#
-# key = location.pubkey
-#
-# try:
-# verify_key = VerifyKey(key=key, encoder=URLSafeBase64Encoder)
-# payload = verify_key.verify(request.data, encoder=URLSafeBase64Encoder)
-#
-# except nacl.exceptions.BadSignatureError as e:
-# return abort(403, f'Bad Signature: {e}')
-#
-# decoded_payload = json.loads(base64.urlsafe_b64decode(payload).decode('utf-8'))
-# current_app.manager.insert_payload(decoded_payload)
-#
-# return '', 204 |
7fe3495398b8663cee420e3d3f8ad8d1135e2df9 | api_server/interview/send_email.py | api_server/interview/send_email.py | # -*- encoding: utf-8 -*-
import smtplib
from email.mime.text import MIMEText
from django.conf import settings
def send_text(sender, receiver, subject, body):
msg = MIMEText(body, _charset='utf-8')
msg['Subject'] = subject
msg['From'] = sender
msg['To'] = receiver
msg["Accept-Language"] = "zh-CN"
msg["Accept-Charset"] = "ISO-8859-1,utf-8"
s = smtplib.SMTP('localhost')
s.sendmail(sender, [receiver], msg.as_string())
s.quit()
def send_invitation(receiver, subject, body):
send_text(
settings.INVITATION_SENDER + '@' + settings.EMAIL_DOMAIN,
receiver,
subject,
body
)
def send_candidate_invitation(receiver, l, r):
send_invitation(
receiver,
'邀请您接受面试',
'请点击链接:' + generate_invitation_url(l, r) + '进入面试房间。'
)
def send_interviewer_invitation(receiver, l, r):
send_invitation(
receiver,
'邀请您作为主考官参加面试',
'请点击链接:' + generate_invitation_url(l, r) + '进入面试房间。'
)
def generate_invitation_url(l, r):
return settings.SITE_URL + '/user/login?l=' + l + '&r=' + r
| # -*- encoding: utf-8 -*-
import smtplib
from email.mime.text import MIMEText
from django.conf import settings
def send_text(sender, receiver, subject, body):
msg = MIMEText(body, _charset='utf-8')
msg['Subject'] = subject
msg['From'] = sender
msg['To'] = receiver
msg["Accept-Language"] = "zh-CN"
msg["Accept-Charset"] = "ISO-8859-1,utf-8"
s = smtplib.SMTP('localhost')
s.sendmail(sender, [receiver], msg.as_string())
s.quit()
def send_invitation(receiver, subject, body):
send_text(
settings.INVITATION_SENDER + '@' + settings.EMAIL_DOMAIN,
receiver,
subject,
body
)
def send_candidate_invitation(receiver, l, r):
send_invitation(
receiver,
u'邀请您接受面试',
u'请点击链接:' + generate_invitation_url(l, r) + u'进入面试房间。'
)
def send_interviewer_invitation(receiver, l, r):
send_invitation(
receiver,
u'邀请您作为主考官参加面试',
u'请点击链接:' + generate_invitation_url(l, r) + u'进入面试房间。'
)
def generate_invitation_url(l, r):
return settings.SITE_URL + '/user/login?l=' + l + '&r=' + r
| Fix encoding of email content | Fix encoding of email content
| Python | mit | t103z/interview,t103z/interview,t103z/interview,t103z/interview | ---
+++
@@ -30,16 +30,16 @@
def send_candidate_invitation(receiver, l, r):
send_invitation(
receiver,
- '邀请您接受面试',
- '请点击链接:' + generate_invitation_url(l, r) + '进入面试房间。'
+ u'邀请您接受面试',
+ u'请点击链接:' + generate_invitation_url(l, r) + u'进入面试房间。'
)
def send_interviewer_invitation(receiver, l, r):
send_invitation(
receiver,
- '邀请您作为主考官参加面试',
- '请点击链接:' + generate_invitation_url(l, r) + '进入面试房间。'
+ u'邀请您作为主考官参加面试',
+ u'请点击链接:' + generate_invitation_url(l, r) + u'进入面试房间。'
)
|
fef4e4ce6b05506babc2c325b08aed77af8b9a3c | cryptex/exchange.py | cryptex/exchange.py | class Exchange(object):
def get_markets(self):
"""
Returns a list of tuples of the form ('XXX', 'YYY') representing the
available markets on the exchange, where XXX and YYY are the currency
codes for the base currency and counter currency, respectively.
"""
raise NotImplementedError
def get_my_open_orders(self):
"""
Returns a list of exchanges.order.Order that represent currently
unfulfilled trade orders.
"""
raise NotImplementedError
def get_my_trades(self):
"""
Returns a list of exchanges.trade.Trade that represent all the user's
trades.
"""
raise NotImplementedError
def cancel_order(self, order_id):
"""
Given an order_id, cancels the order associeted with the id. Returns
None.
"""
raise NotImplementedError
def buy(self, frm, to, quantity, price):
raise NotImplementedError
def sell(self, frm, to, quantity, price):
raise NotImplementedError
| class Exchange(object):
def get_markets(self):
"""
Returns a list of tuples of the form ('XXX', 'YYY') representing the
available markets on the exchange, where XXX and YYY are the currency
codes for the base currency and counter currency, respectively.
"""
raise NotImplementedError
def get_my_open_orders(self):
"""
Returns a list of exchanges.order.Order that represent currently
unfulfilled trade orders.
"""
raise NotImplementedError
def get_my_trades(self):
"""
Returns a list of exchanges.trade.Trade that represent all the user's
trades.
"""
raise NotImplementedError
def cancel_order(self, order_id):
"""
Given an order_id, cancels the order associeted with the id. Returns
None.
"""
raise NotImplementedError
def buy(self, market, quantity, price):
raise NotImplementedError
def sell(self, market, quantity, price):
raise NotImplementedError
| Modify buy and sell interface to take market tuples | Modify buy and sell interface to take market tuples
| Python | mit | coink/cryptex | ---
+++
@@ -28,8 +28,8 @@
"""
raise NotImplementedError
- def buy(self, frm, to, quantity, price):
+ def buy(self, market, quantity, price):
raise NotImplementedError
- def sell(self, frm, to, quantity, price):
+ def sell(self, market, quantity, price):
raise NotImplementedError |
e7b7c93efe20ac50256c33ac7b37e4e51151123f | OIPA/api/region/urls.py | OIPA/api/region/urls.py | from django.conf.urls import patterns, url
from api.region import views
urlpatterns = patterns(
'',
url(r'^$', views.RegionList.as_view(), name='region-list'),
url(
r'^/(?P<pk>[0-9]+)$',
views.RegionDetail.as_view(),
name='region-detail'
),
url(
r'^/(?P<pk>[0-9]+)/countries$',
views.RegionCountries.as_view(),
name='region-countries'
),
url(
r'^/(?P<pk>[0-9]+)/activities$',
views.RegionActivities.as_view(),
name='region-activities'
),
)
| from django.conf.urls import patterns, url
from api.region import views
urlpatterns = patterns(
'',
url(r'^$', views.RegionList.as_view(), name='region-list'),
url(
r'^/(?P<pk>[A-Za-z0-9]+)$',
views.RegionDetail.as_view(),
name='region-detail'
),
url(
r'^/(?P<pk>[A-Za-z0-9]+)/countries$',
views.RegionCountries.as_view(),
name='region-countries'
),
url(
r'^/(?P<pk>[A-Za-z0-9]+)/activities$',
views.RegionActivities.as_view(),
name='region-activities'
),
)
| Fix region resolve on non Integer | Fix region resolve on non Integer
| Python | agpl-3.0 | openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,openaid-IATI/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA,zimmerman-zimmerman/OIPA | ---
+++
@@ -6,17 +6,17 @@
'',
url(r'^$', views.RegionList.as_view(), name='region-list'),
url(
- r'^/(?P<pk>[0-9]+)$',
+ r'^/(?P<pk>[A-Za-z0-9]+)$',
views.RegionDetail.as_view(),
name='region-detail'
),
url(
- r'^/(?P<pk>[0-9]+)/countries$',
+ r'^/(?P<pk>[A-Za-z0-9]+)/countries$',
views.RegionCountries.as_view(),
name='region-countries'
),
url(
- r'^/(?P<pk>[0-9]+)/activities$',
+ r'^/(?P<pk>[A-Za-z0-9]+)/activities$',
views.RegionActivities.as_view(),
name='region-activities'
), |
e214e46e748881e6418ffac374a87d6ad30fcfea | packages/Python/lldbsuite/test/repl/pounwrapping/TestPOUnwrapping.py | packages/Python/lldbsuite/test/repl/pounwrapping/TestPOUnwrapping.py | # TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
| # TestPOUnwrapping.py
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ------------------------------------------------------------------------------
"""Test that we can correctly handle a nested generic type."""
import lldbsuite.test.lldbrepl as lldbrepl
import lldbsuite.test.decorators as decorators
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
@decorators.expectedFailureAll(
oslist=[
"macosx",
"linux"],
bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> {
var t: T?
var u: U?
init() { t = nil; u = nil }
init(_ x: T, _ y: U) { t = x; u = y }
};(Foo<String,Double>(),Foo<Double,String>(3.14,"hello"))''',
patterns=[
r'\$R0: \(Foo<String, Double>, Foo<Double, String>\) = {',
r'0 = {',
r't = nil',
r'u = nil',
r'1 = {',
r't = 3\.14[0-9]+', 'u = "hello"'])
| Disable test that fails on bot | Disable test that fails on bot
I will look at it.
rdar://35264910
| Python | apache-2.0 | apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb | ---
+++
@@ -18,7 +18,11 @@
class REPLBasicTestCase(lldbrepl.REPLTest):
mydir = lldbrepl.REPLTest.compute_mydir(__file__)
-
+ @decorators.expectedFailureAll(
+ oslist=[
+ "macosx",
+ "linux"],
+ bugnumber="rdar://35264910")
def doTest(self):
self.command(
'''class Foo<T,U> { |
ab845dfd7eb2142ee8bf2fb86f58544e65ac97b8 | auth0/v2/authentication/enterprise.py | auth0/v2/authentication/enterprise.py | from .base import AuthenticationBase
class Enterprise(AuthenticationBase):
def __init__(self, domain):
self.domain = domain
def saml_metadata(self, client_id):
return self.get(url='https://%s/samlp/metadata/%s' % (self.domain,
client_id))
def wsfed_metadata(self):
url = 'https://%s/wsfed/FederationMetadata' \
'/2007-06/FederationMetadata.xml'
return self.get(url=url % self.domain)
| from .base import AuthenticationBase
class Enterprise(AuthenticationBase):
"""Enterprise endpoints.
Args:
domain (str): Your auth0 domain (e.g: username.auth0.com)
"""
def __init__(self, domain):
self.domain = domain
def saml_metadata(self, client_id):
"""Get SAML2.0 Metadata.
Args:
client_id (str): Id of the client to get the SAML metadata for.
"""
return self.get(url='https://%s/samlp/metadata/%s' % (self.domain,
client_id))
def wsfed_metadata(self):
"""Returns the WS-Federation Metadata.
"""
url = 'https://%s/wsfed/FederationMetadata' \
'/2007-06/FederationMetadata.xml'
return self.get(url=url % self.domain)
| Add docstrings in Enterprise class | Add docstrings in Enterprise class
| Python | mit | auth0/auth0-python,auth0/auth0-python | ---
+++
@@ -3,14 +3,29 @@
class Enterprise(AuthenticationBase):
+ """Enterprise endpoints.
+
+ Args:
+ domain (str): Your auth0 domain (e.g: username.auth0.com)
+ """
+
def __init__(self, domain):
self.domain = domain
def saml_metadata(self, client_id):
+ """Get SAML2.0 Metadata.
+
+ Args:
+ client_id (str): Id of the client to get the SAML metadata for.
+ """
+
return self.get(url='https://%s/samlp/metadata/%s' % (self.domain,
client_id))
def wsfed_metadata(self):
+ """Returns the WS-Federation Metadata.
+ """
+
url = 'https://%s/wsfed/FederationMetadata' \
'/2007-06/FederationMetadata.xml'
|
082e34ae8d336d2fe93ea428db0b9a72bbfd649e | templatetags/stringformat.py | templatetags/stringformat.py | from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
'''
format the value
'''
return fmt.format(value)
| from django import template
register=template.Library()
@register.filter
def stringformat(value, fmt='{}'):
'''
format the value
'''
if isinstance(value, dict):
return fmt.format(**value)
return fmt.format(value)
| Add named format support if dict | Add named format support if dict
| Python | apache-2.0 | kensonman/webframe,kensonman/webframe,kensonman/webframe | ---
+++
@@ -6,4 +6,6 @@
'''
format the value
'''
+ if isinstance(value, dict):
+ return fmt.format(**value)
return fmt.format(value) |
e502c906e0515dc753c972656751c9fd1ef939f9 | backend/project_name/settings/test.py | backend/project_name/settings/test.py | from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': base_dir_join('db.sqlite3'),
}
}
STATIC_ROOT = base_dir_join('staticfiles')
STATIC_URL = '/static/'
MEDIA_ROOT = base_dir_join('mediafiles')
MEDIA_URL = '/media/'
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# Speed up password hashing
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
# Celery
CELERY_TASK_ALWAYS_EAGER = True
CELERY_TASK_EAGER_PROPAGATES = True
| from .base import * # noqa
SECRET_KEY = 'test'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': base_dir_join('db.sqlite3'),
}
}
STATIC_ROOT = "staticfiles"
STATIC_URL = "/static/"
MEDIA_ROOT = "mediafiles"
MEDIA_URL = "/media/"
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# Speed up password hashing
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
# Celery
CELERY_TASK_ALWAYS_EAGER = True
CELERY_TASK_EAGER_PROPAGATES = True
| Fix ci build by adjust static root and media root path | Fix ci build by adjust static root and media root path
| Python | mit | vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate,vintasoftware/django-react-boilerplate | ---
+++
@@ -10,11 +10,11 @@
}
}
-STATIC_ROOT = base_dir_join('staticfiles')
-STATIC_URL = '/static/'
+STATIC_ROOT = "staticfiles"
+STATIC_URL = "/static/"
-MEDIA_ROOT = base_dir_join('mediafiles')
-MEDIA_URL = '/media/'
+MEDIA_ROOT = "mediafiles"
+MEDIA_URL = "/media/"
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage' |
6b42983178f2a6fc3c485dc23825b748859351e5 | gimlet/backends/sql.py | gimlet/backends/sql.py | from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels'):
engine = create_engine(url)
meta = MetaData()
meta.bind = engine
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = self.table.select('1', for_update=True).\
where(self.table.c.key == key).execute().fetchone()
if r:
# If it exists, use an UPDATE.
self.table.update().values(data=raw).\
where(self.table.c.key == key).execute()
else:
# Otherwise INSERT.
self.table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
| from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels'):
meta = MetaData(bind=create_engine(url))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = self.table.select('1', for_update=True).\
where(self.table.c.key == key).execute().fetchone()
if r:
# If it exists, use an UPDATE.
self.table.update().values(data=raw).\
where(self.table.c.key == key).execute()
else:
# Otherwise INSERT.
self.table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
| Simplify creation of MetaData in SQLBackend | Simplify creation of MetaData in SQLBackend
Squash a few lines into one.
| Python | mit | storborg/gimlet | ---
+++
@@ -6,10 +6,7 @@
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels'):
- engine = create_engine(url)
- meta = MetaData()
- meta.bind = engine
-
+ meta = MetaData(bind=create_engine(url))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False, |
1a9e83262201ad7256c5f02c91dc38c2bdccd908 | email_auth/forms.py | email_auth/forms.py | from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import AuthenticationForm as Form
from django.utils.translation import ugettext_lazy as _
class AuthenticationForm(Form):
"""Authenticate a ``User`` by email and password."""
def __init__(self, *args, **kwargs):
super(AuthenticationForm, self).__init__(*args, **kwargs)
del(self.fields['username'])
self.fields.insert(0, 'email', forms.EmailField(label=_(u"Email")))
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
self.user_cache = authenticate(username=email,
password=password)
if self.user_cache is None:
raise forms.ValidationError(_("Please enter a correct email "
"and password. Note that the "
"password is case-sensitive."))
elif not self.user_cache.is_active:
raise forms.ValidationError(_("This account is inactive."))
if self.request:
if not self.request.session.test_cookie_worked():
raise forms.ValidationError(_("Your Web browser doesn't "
"appear to have cookies "
"enabled. Cookies are required "
"for logging in."))
return self.cleaned_data
| from django import forms
from django.contrib.auth import authenticate
from django.contrib.auth.forms import AuthenticationForm as Form
from django.utils.translation import ugettext_lazy as _
class AuthenticationForm(Form):
"""Authenticate a ``User`` by email and password."""
def __init__(self, *args, **kwargs):
super(AuthenticationForm, self).__init__(*args, **kwargs)
del(self.fields['username'])
self.fields.insert(0, 'email', forms.CharField(label=_(u"Email")))
def clean(self):
email = self.cleaned_data.get('email')
password = self.cleaned_data.get('password')
if email and password:
self.user_cache = authenticate(username=email,
password=password)
if self.user_cache is None:
raise forms.ValidationError(_("Please enter a correct email "
"and password. Note that the "
"password is case-sensitive."))
elif not self.user_cache.is_active:
raise forms.ValidationError(_("This account is inactive."))
if self.request:
if not self.request.session.test_cookie_worked():
raise forms.ValidationError(_("Your Web browser doesn't "
"appear to have cookies "
"enabled. Cookies are required "
"for logging in."))
return self.cleaned_data
| Allow entry of usernames in email field. | Allow entry of usernames in email field.
| Python | bsd-3-clause | strange/django-email-auth | ---
+++
@@ -8,7 +8,7 @@
def __init__(self, *args, **kwargs):
super(AuthenticationForm, self).__init__(*args, **kwargs)
del(self.fields['username'])
- self.fields.insert(0, 'email', forms.EmailField(label=_(u"Email")))
+ self.fields.insert(0, 'email', forms.CharField(label=_(u"Email")))
def clean(self):
email = self.cleaned_data.get('email') |
cf44260d057e289a089c1c3c440e5f64366facfa | scraping/urls/scrape_fish.py | scraping/urls/scrape_fish.py | import pandas as pd
from subprocess import check_output
import sys
fish_df = pd.read_csv(sys.argv[1],names=['fish'])
url_dict = {}
for fish in fish_df.fish:
output = check_output(['node','scrape_image_urls.js',fish,sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
url_dict[fish] = urls
url_df = pd.DataFrame(url_dict)
url_df.to_csv(sys.argv[3], sep='|', index=False) | import pandas as pd
from subprocess import check_output
import sys
fish_df = pd.read_csv(sys.argv[1],names=['fish'])
dfs = []
for fish in fish_df.fish:
output = check_output(['node','scrape_image_urls.js',fish + ' fish',sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
dfs.append(pd.DataFrame({'fish': fish, 'url': urls}))
out_df = pd.concat(dfs)
out_df.to_csv(sys.argv[3], sep='|', index=False)
| Update scrape fish to handle different size url sets and to specify to google that we want fish | Update scrape fish to handle different size url sets and to specify to google that we want fish
| Python | mit | matthew-sochor/fish.io.ai,matthew-sochor/fish.io.ai,matthew-sochor/fish.io.ai,matthew-sochor/fish.io.ai | ---
+++
@@ -4,12 +4,12 @@
fish_df = pd.read_csv(sys.argv[1],names=['fish'])
-url_dict = {}
+dfs = []
for fish in fish_df.fish:
- output = check_output(['node','scrape_image_urls.js',fish,sys.argv[2]])
+ output = check_output(['node','scrape_image_urls.js',fish + ' fish',sys.argv[2]])
splits = str(output).replace('\\n','').split(' url: ')
urls = [s.split(', width')[0][1:-1] for s in splits[1:]]
- url_dict[fish] = urls
+ dfs.append(pd.DataFrame({'fish': fish, 'url': urls}))
-url_df = pd.DataFrame(url_dict)
-url_df.to_csv(sys.argv[3], sep='|', index=False)
+out_df = pd.concat(dfs)
+out_df.to_csv(sys.argv[3], sep='|', index=False) |
a41f468f667869a433e5fbb2bd2dde797ae24586 | examples/img_client.py | examples/img_client.py | #!/usr/bin/env python
"""
This example demonstrates a client fetching images from a server running img_server.py.
The first packet contains the number of chunks to expect, and then that number of chunks is read.
Lost packets are not handled in any way.
"""
from nuts import UDPAuthChannel
channel = UDPAuthChannel('secret') # Re-key handling? Load key from file, or mutable struct?
with channel.connect( ('10.0.0.1', 8001) ) as session:
session.send('Take pic!')
msg = session.receive()
num_chunks = int(msg)
with open('latest_img.jpg', 'wb') as img_fh:
for i in range(num_chunks):
chunk = session.receive()
print('got chunk %d of %d' % (i + 1, num_chunks))
img_fh.write(chunk)
| #!/usr/bin/env python
"""
This example demonstrates a client fetching images from a server running img_server.py.
The first packet contains the number of chunks to expect, and then that number of chunks is read.
Lost packets are not handled in any way.
"""
from nuts import UDPAuthChannel
channel = UDPAuthChannel('secret', timeout=4)
with channel.connect( ('10.0.0.1', 8001) ) as session:
session.send('Take pic!')
msg = session.receive()
num_chunks = int(msg)
with open('latest_img.jpg', 'wb') as img_fh:
for i in range(num_chunks):
chunk = session.receive()
print('got chunk %d of %d' % (i + 1, num_chunks))
img_fh.write(chunk)
| Set 4s timeout for img client | Set 4s timeout for img client
| Python | mit | thusoy/nuts-auth,thusoy/nuts-auth | ---
+++
@@ -9,7 +9,7 @@
from nuts import UDPAuthChannel
-channel = UDPAuthChannel('secret') # Re-key handling? Load key from file, or mutable struct?
+channel = UDPAuthChannel('secret', timeout=4)
with channel.connect( ('10.0.0.1', 8001) ) as session:
session.send('Take pic!')
msg = session.receive() |
3cc8f2f212199d956c0132cc0aa12bd33e94e8dc | tests/drivers/test-facets.py | tests/drivers/test-facets.py | import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-facets.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestFacets (unittest.TestCase):
def testQuantity (self):
xml = '<quantity xmlns="URN:test-facets">35</quantity>'
instance = CreateFromDOM(pyxb.utils.domutils.StringToDOM(xml).documentElement)
self.assertEqual(35, instance)
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
self.assertEqual(v, type(instance))
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
if __name__ == '__main__':
unittest.main()
| import pyxb.binding.generate
import pyxb.utils.domutils
from xml.dom import Node
import os.path
schema_path = '%s/../schemas/test-facets.xsd' % (os.path.dirname(__file__),)
code = pyxb.binding.generate.GeneratePython(schema_location=schema_path)
rv = compile(code, 'test', 'exec')
eval(rv)
from pyxb.exceptions_ import *
import unittest
class TestFacets (unittest.TestCase):
def testQuantity (self):
xml = '<quantity xmlns="URN:test-facets">35</quantity>'
instance = CreateFromDOM(pyxb.utils.domutils.StringToDOM(xml).documentElement)
self.assertEqual(35, instance)
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
self.assertEqual(v.typeDefinition(), type(instance))
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
if __name__ == '__main__':
unittest.main()
| Change to support new binding model | Change to support new binding model
| Python | apache-2.0 | jonfoster/pyxb2,jonfoster/pyxb-upstream-mirror,balanced/PyXB,jonfoster/pyxb1,jonfoster/pyxb-upstream-mirror,balanced/PyXB,CantemoInternal/pyxb,jonfoster/pyxb-upstream-mirror,pabigot/pyxb,jonfoster/pyxb2,balanced/PyXB,CantemoInternal/pyxb,pabigot/pyxb,jonfoster/pyxb1,CantemoInternal/pyxb,jonfoster/pyxb2 | ---
+++
@@ -21,7 +21,7 @@
for (k,v) in globals().items():
if k.startswith('_STD_ANON'):
break
- self.assertEqual(v, type(instance))
+ self.assertEqual(v.typeDefinition(), type(instance))
self.assertRaises(Exception, v, -52)
self.assertRaises(Exception, v, 100)
|
a2a4b9f400fa6d62d3671d69d9742d06c165f7da | amaranth/ml/__main__.py | amaranth/ml/__main__.py | # Lint as: python3
"""This script is used to run the amaranth.ml module as an executable.
For now, this script just delegates it's main to amaranth.ml.train
"""
from amaranth.ml import train
def main():
train.main()
if __name__ == '__main__':
main()
| # Lint as: python3
"""This script is used to run the amaranth.ml module as an executable.
For now, this script just delegates it's main to amaranth.ml.train
"""
from amaranth.ml import train
from amaranth.ml import interactive
def main():
# List of possible functions this module can perform
# List elements should be tuples of str titles and functions to call
options = [
('Training: Train ML model on the dataset', train.main),
(('Interactive: Interact with the ML model by giving it strings to '
'classify'), interactive.main),
]
# Prompt user
print('Please enter the number of the option you\'d like to choose.')
print('---')
for idx, (option_title, _) in enumerate(options):
print(f'{idx}) {option_title}')
# Parse user's input choice
choice = None
while choice is None:
try:
user_in = int(input())
if 0 <= user_in < len(options):
choice = user_in
else:
print('Please enter an option from the list above.')
except ValueError:
print('Please enter a valid integer.')
# Run user's choice
options[choice][1]()
if __name__ == '__main__':
main()
| Allow user to choose what to run in amaranth.ml | Allow user to choose what to run in amaranth.ml
Choose between the executables for training the model or interacting
with the model.
| Python | apache-2.0 | googleinterns/amaranth,googleinterns/amaranth | ---
+++
@@ -5,10 +5,38 @@
"""
from amaranth.ml import train
+from amaranth.ml import interactive
def main():
- train.main()
+ # List of possible functions this module can perform
+ # List elements should be tuples of str titles and functions to call
+ options = [
+ ('Training: Train ML model on the dataset', train.main),
+ (('Interactive: Interact with the ML model by giving it strings to '
+ 'classify'), interactive.main),
+ ]
+
+ # Prompt user
+ print('Please enter the number of the option you\'d like to choose.')
+ print('---')
+ for idx, (option_title, _) in enumerate(options):
+ print(f'{idx}) {option_title}')
+
+ # Parse user's input choice
+ choice = None
+ while choice is None:
+ try:
+ user_in = int(input())
+ if 0 <= user_in < len(options):
+ choice = user_in
+ else:
+ print('Please enter an option from the list above.')
+ except ValueError:
+ print('Please enter a valid integer.')
+
+ # Run user's choice
+ options[choice][1]()
if __name__ == '__main__': |
fa0d138ce465efdd630b83ba4a7ee10888a68b4a | alg_factorial.py | alg_factorial.py | """Factorial series:
1!, 2!, 3!, ...
- Factorial(1) = 1! = 1
- Factorial(2) = 2! = 2
- Factorial(n) = n! = n * (n - 1)! = n * Factorial(n - 1)
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def factorial_recur(n):
"""Get the nth number of Fibonacci series, Fn, by recursion."""
pass
def factorial_memo(n):
"""Get the nth number of Fibonacci series, Fn, by memorization."""
pass
def factorial_dp(n):
"""Get the nth number of Fibonacci series by dynamic programming."""
pass
def main():
pass
if __name__ == '__main__':
main()
| """Factorial series:
1!, 2!, 3!, ...
- Factorial(1) = 1! = 1
- Factorial(2) = 2! = 2
- Factorial(n) = n! = n * (n - 1)! = n * Factorial(n - 1)
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def factorial_recur(n):
"""Get the nth number of factorial series, Fn, by recursion.
- Time complexity: Fn - 1 = O(Fn); too fast.
- Space complexity: O(n).
"""
if n <= 1:
return 1
else:
return n * factorial_recur(n - 1)
def factorial_memo(n):
"""Get the nth number of factorial series, Fn, by memorization.
- Time complexity: O(n).
- Space complexity: O(n).
"""
fn_d = {}
fn_d[0] = 1
fn_d[1] = 1
for n in xrange(2, n + 1):
fn_d[n] = n * fn_d[n - 1]
return fn_d[n]
def factorial_dp(n):
"""Get the nth number of factorial series by dynamic programming.
- Time complexity is still O(n), like factorial_memo().
- Space complexity is O(1), improving a lot.
"""
fn = 1
for i in xrange(2, n + 1):
fn *= i
return fn
def main():
import time
n = 10
print('{}th number of factorial series:'.format(n))
start_time = time.time()
print('By recursion: {}'.format(factorial_recur(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By memorization: {}'.format(factorial_memo(n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('By dynamic programming: {}'.format(factorial_dp(n)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
| Complete factorial_recur(), factorial_memo() & factorial_dp() from Hokaido | Complete factorial_recur(), factorial_memo() & factorial_dp() from Hokaido
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | ---
+++
@@ -12,22 +12,59 @@
from __future__ import division
def factorial_recur(n):
- """Get the nth number of Fibonacci series, Fn, by recursion."""
- pass
+ """Get the nth number of factorial series, Fn, by recursion.
+
+ - Time complexity: Fn - 1 = O(Fn); too fast.
+ - Space complexity: O(n).
+ """
+ if n <= 1:
+ return 1
+ else:
+ return n * factorial_recur(n - 1)
def factorial_memo(n):
- """Get the nth number of Fibonacci series, Fn, by memorization."""
- pass
-
+ """Get the nth number of factorial series, Fn, by memorization.
+
+ - Time complexity: O(n).
+ - Space complexity: O(n).
+ """
+ fn_d = {}
+ fn_d[0] = 1
+ fn_d[1] = 1
+ for n in xrange(2, n + 1):
+ fn_d[n] = n * fn_d[n - 1]
+ return fn_d[n]
def factorial_dp(n):
- """Get the nth number of Fibonacci series by dynamic programming."""
- pass
+ """Get the nth number of factorial series by dynamic programming.
+
+ - Time complexity is still O(n), like factorial_memo().
+ - Space complexity is O(1), improving a lot.
+ """
+ fn = 1
+ for i in xrange(2, n + 1):
+ fn *= i
+ return fn
def main():
- pass
+ import time
+ n = 10
+
+ print('{}th number of factorial series:'.format(n))
+
+ start_time = time.time()
+ print('By recursion: {}'.format(factorial_recur(n)))
+ print('Time: {}'.format(time.time() - start_time))
+
+ start_time = time.time()
+ print('By memorization: {}'.format(factorial_memo(n)))
+ print('Time: {}'.format(time.time() - start_time))
+
+ start_time = time.time()
+ print('By dynamic programming: {}'.format(factorial_dp(n)))
+ print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main() |
6115590cde40c05b92cb6fa9e7358e08c964f4d1 | nipype/utils/config.py | nipype/utils/config.py | '''
Created on 20 Apr 2010
@author: Chris Filo Gorgolewski
'''
import ConfigParser, os
from io import StringIO
default_cfg = StringIO("""
[logging]
workflow_level = INFO
node_level = INFO
filemanip_level = INFO
[execution]
stop_on_first_crash = false
hash_method = content
""")
config = ConfigParser.ConfigParser()
config.readfp(default_cfg)
config.read(['nipype.cfg', os.path.expanduser('~/.nipype.cfg')])
| '''
Created on 20 Apr 2010
@author: Chris Filo Gorgolewski
'''
import ConfigParser, os
from StringIO import StringIO
default_cfg = StringIO("""
[logging]
workflow_level = INFO
node_level = INFO
filemanip_level = INFO
[execution]
stop_on_first_crash = false
hash_method = content
""")
config = ConfigParser.ConfigParser()
config.readfp(default_cfg)
config.read(['nipype.cfg', os.path.expanduser('~/.nipype.cfg')])
| Use StringIO.StringIO instead of the io.StringIO. | Use StringIO.StringIO instead of the io.StringIO.
The io library is new to Python 2.6, we still support 2.5. And the doc for
the io library encourages us to continue to use StringIO.StringIO:
http://docs.python.org/whatsnew/2.6.html#pep-3116-new-i-o-library
git-svn-id: 24f545668198cdd163a527378499f2123e59bf9f@1333 ead46cd0-7350-4e37-8683-fc4c6f79bf00
| Python | bsd-3-clause | blakedewey/nipype,wanderine/nipype,fprados/nipype,gerddie/nipype,iglpdc/nipype,mick-d/nipype,arokem/nipype,rameshvs/nipype,iglpdc/nipype,FredLoney/nipype,mick-d/nipype_source,JohnGriffiths/nipype,dmordom/nipype,glatard/nipype,carlohamalainen/nipype,arokem/nipype,blakedewey/nipype,glatard/nipype,rameshvs/nipype,carolFrohlich/nipype,pearsonlab/nipype,sgiavasis/nipype,wanderine/nipype,mick-d/nipype_source,Leoniela/nipype,glatard/nipype,pearsonlab/nipype,pearsonlab/nipype,JohnGriffiths/nipype,arokem/nipype,mick-d/nipype,glatard/nipype,dmordom/nipype,carlohamalainen/nipype,sgiavasis/nipype,gerddie/nipype,FCP-INDI/nipype,grlee77/nipype,christianbrodbeck/nipype,Leoniela/nipype,grlee77/nipype,FCP-INDI/nipype,mick-d/nipype_source,wanderine/nipype,FCP-INDI/nipype,fprados/nipype,gerddie/nipype,rameshvs/nipype,iglpdc/nipype,grlee77/nipype,carolFrohlich/nipype,carolFrohlich/nipype,blakedewey/nipype,satra/NiPypeold,dgellis90/nipype,satra/NiPypeold,mick-d/nipype,sgiavasis/nipype,wanderine/nipype,gerddie/nipype,mick-d/nipype,carolFrohlich/nipype,JohnGriffiths/nipype,dgellis90/nipype,arokem/nipype,JohnGriffiths/nipype,christianbrodbeck/nipype,dgellis90/nipype,iglpdc/nipype,FredLoney/nipype,Leoniela/nipype,blakedewey/nipype,FCP-INDI/nipype,rameshvs/nipype,dmordom/nipype,carlohamalainen/nipype,FredLoney/nipype,sgiavasis/nipype,pearsonlab/nipype,grlee77/nipype,fprados/nipype,dgellis90/nipype | ---
+++
@@ -4,7 +4,7 @@
@author: Chris Filo Gorgolewski
'''
import ConfigParser, os
-from io import StringIO
+from StringIO import StringIO
default_cfg = StringIO("""
[logging] |
cd7a8b999280e0e834e196066068f78375cfb88a | water_level/water_level.py | water_level/water_level.py | '''
Created on Aug 1, 2017
@author: alkaitz
'''
if __name__ == '__main__':
pass | '''
Created on Aug 1, 2017
@author: alkaitz
'''
'''
[3 2 3] -> 1
'''
if __name__ == '__main__':
pass | Include quick sample for water level identification | Include quick sample for water level identification
| Python | mit | alkaitz/general-programming | ---
+++
@@ -4,5 +4,8 @@
@author: alkaitz
'''
+'''
+ [3 2 3] -> 1
+'''
if __name__ == '__main__':
pass |
3d1dba15097ac8b746c138b75fe1763aa4b8ac12 | footballseason/urls.py | footballseason/urls.py | from django.conf.urls import url
from . import views
urlpatterns = [
# eg: /footballseason/
url(r'^$', views.index, name='index'),
# eg: /footballseason/3
url(r'^(?P<week_id>[0-9]+)/$', views.display, name='display'),
# eg: /footballseason/3/submit
url(r'^(?P<week_id>[0-9]+)/submit$', views.submit, name='submit'),
# eg: /footballseason/3/vote
url(r'^(?P<week_id>[0-9]+)/vote$', views.vote, name='vote'),
# eg: /footballseason/update
url(r'^update$', views.update, name='update'),
# eg: /footballseason/records
url(r'^records$', views.records_default, name='records_default'),
# eg: /footballseason/records/2015
url(r'^records/(?P<season>[0-9]+)$', views.records_by_season, name='records_by_season'),
# eg: /footballseason/records/2015/3
url(r'^records/(?P<season>[0-9]+)/(?P<week>[0-9]+)$', views.records_by_week, name='records_by_week'),
]
| from django.conf.urls import url
from . import views
urlpatterns = [
# eg: /footballseason/
url(r'^$', views.index, name='index'),
# eg: /footballseason/3/
url(r'^(?P<week_id>[0-9]+)/$', views.display, name='display'),
# eg: /footballseason/3/submit/
url(r'^(?P<week_id>[0-9]+)/submit/$', views.submit, name='submit'),
# eg: /footballseason/3/vote/
url(r'^(?P<week_id>[0-9]+)/vote/$', views.vote, name='vote'),
# eg: /footballseason/update/
url(r'^update/$', views.update, name='update'),
# eg: /footballseason/records/
url(r'^records/$', views.records_default, name='records_default'),
# eg: /footballseason/records/2015/
url(r'^records/(?P<season>[0-9]+)/$', views.records_by_season, name='records_by_season'),
# eg: /footballseason/records/2015/3/
url(r'^records/(?P<season>[0-9]+)/(?P<week>[0-9]+)/$', views.records_by_week, name='records_by_week'),
]
| Fix URLs so APPEND_SLASH works | Fix URLs so APPEND_SLASH works
| Python | mit | mkokotovich/footballpicks,mkokotovich/footballpicks,mkokotovich/footballpicks,mkokotovich/footballpicks | ---
+++
@@ -6,24 +6,24 @@
# eg: /footballseason/
url(r'^$', views.index, name='index'),
- # eg: /footballseason/3
+ # eg: /footballseason/3/
url(r'^(?P<week_id>[0-9]+)/$', views.display, name='display'),
- # eg: /footballseason/3/submit
- url(r'^(?P<week_id>[0-9]+)/submit$', views.submit, name='submit'),
+ # eg: /footballseason/3/submit/
+ url(r'^(?P<week_id>[0-9]+)/submit/$', views.submit, name='submit'),
- # eg: /footballseason/3/vote
- url(r'^(?P<week_id>[0-9]+)/vote$', views.vote, name='vote'),
+ # eg: /footballseason/3/vote/
+ url(r'^(?P<week_id>[0-9]+)/vote/$', views.vote, name='vote'),
- # eg: /footballseason/update
- url(r'^update$', views.update, name='update'),
+ # eg: /footballseason/update/
+ url(r'^update/$', views.update, name='update'),
- # eg: /footballseason/records
- url(r'^records$', views.records_default, name='records_default'),
+ # eg: /footballseason/records/
+ url(r'^records/$', views.records_default, name='records_default'),
- # eg: /footballseason/records/2015
- url(r'^records/(?P<season>[0-9]+)$', views.records_by_season, name='records_by_season'),
+ # eg: /footballseason/records/2015/
+ url(r'^records/(?P<season>[0-9]+)/$', views.records_by_season, name='records_by_season'),
- # eg: /footballseason/records/2015/3
- url(r'^records/(?P<season>[0-9]+)/(?P<week>[0-9]+)$', views.records_by_week, name='records_by_week'),
+ # eg: /footballseason/records/2015/3/
+ url(r'^records/(?P<season>[0-9]+)/(?P<week>[0-9]+)/$', views.records_by_week, name='records_by_week'),
] |
b7d8daf8377fff86662bb8721b1008667e8a4767 | gstorage/__init__.py | gstorage/__init__.py | # -*- coding: utf-8 -*-
"""
"""
__version__ = "0.0.1.1"
__author__ = "Fyndiq"
__license__ = "MIT"
__copyright__ = "Copyright 2016 Fyndiq AB"
| # -*- coding: utf-8 -*-
"""
django-gstorage
~~~~~~~~~~~~~~~
A plug-and-play replacement for FileSystemStorage but
using Google storage for persistence
"""
__version__ = '0.0.1.1'
__author__ = 'Fyndiq'
__license__ = 'MIT'
__copyright__ = 'Copyright 2016 Fyndiq AB'
| Add a one-liner about module functionality and tidy double quotes | Add a one-liner about module functionality and tidy double quotes
| Python | mit | fyndiq/django-gstorage | ---
+++
@@ -1,8 +1,13 @@
# -*- coding: utf-8 -*-
"""
+ django-gstorage
+ ~~~~~~~~~~~~~~~
+
+ A plug-and-play replacement for FileSystemStorage but
+ using Google storage for persistence
"""
-__version__ = "0.0.1.1"
-__author__ = "Fyndiq"
-__license__ = "MIT"
-__copyright__ = "Copyright 2016 Fyndiq AB"
+__version__ = '0.0.1.1'
+__author__ = 'Fyndiq'
+__license__ = 'MIT'
+__copyright__ = 'Copyright 2016 Fyndiq AB' |
cd67e3d2419fd84fd11e469d17562bba19131099 | looknsay.py | looknsay.py | #!/usr/bin/env python3
from argparse import ArgumentParser
from itertools import groupby
def iterate(n):
result = 0
digits = [int(i) for i in str(n)]
for k, g in groupby(digits):
result = result * 100 + len(tuple(g)) * 10 + k
return result
def compute(n, i=20):
yield n
x = n
for i in range(0, i):
x = iterate(x)
yield x
if __name__ == "__main__":
parser = ArgumentParser(description='Generate sequence of Look-and-Say numbers.')
parser.add_argument('seed', type=int, nargs='?', default=1,
help='sequence seed')
parser.add_argument('-i', '--iterations', dest='iterations', type=int, default=20,
help='number of iterations')
args = parser.parse_args()
for step in compute(args.seed, args.iterations):
print(step) | #!/usr/bin/env python3
from argparse import ArgumentParser
from itertools import groupby
def iterate(n):
result = 0
digits = [int(i) for i in str(n)]
for k, g in groupby(digits):
result = result * 100 + len(tuple(g)) * 10 + k
return result
def compute(n, i=20):
yield n
x = n
for i in range(0, i):
x = iterate(x)
yield x
def last_item(iter):
item = None
for item in iter:
pass
return item
if __name__ == "__main__":
parser = ArgumentParser(description='Generate sequence of Look-and-Say numbers.')
parser.add_argument('seed', type=int, nargs='?', default=1,
help='sequence seed')
parser.add_argument('-i', '--iterations', dest='iterations', type=int, default=20,
help='number of iterations')
parser.add_argument('-last', '--only-last', action='store_true',
help='print only last step of the iteration')
args = parser.parse_args()
computer = compute(args.seed, args.iterations)
if not args.only_last:
for step in computer:
print(step)
else:
print(last_item(computer)) | Add option to print only last step of iteration. | Add option to print only last step of iteration.
| Python | unlicense | jajakobyly/looknsay | ---
+++
@@ -20,14 +20,27 @@
yield x
+def last_item(iter):
+ item = None
+ for item in iter:
+ pass
+ return item
+
+
if __name__ == "__main__":
parser = ArgumentParser(description='Generate sequence of Look-and-Say numbers.')
parser.add_argument('seed', type=int, nargs='?', default=1,
help='sequence seed')
parser.add_argument('-i', '--iterations', dest='iterations', type=int, default=20,
help='number of iterations')
+ parser.add_argument('-last', '--only-last', action='store_true',
+ help='print only last step of the iteration')
args = parser.parse_args()
+ computer = compute(args.seed, args.iterations)
- for step in compute(args.seed, args.iterations):
- print(step)
+ if not args.only_last:
+ for step in computer:
+ print(step)
+ else:
+ print(last_item(computer)) |
72d193956d1be4e0fbbe92b417d9022770de823b | fabfile/__init__.py | fabfile/__init__.py | """Django project fabfile."""
import os
from fabric.api import puts, task
from fabric.utils import indent
from deploy import deploy_to_heroku, prepare_to_deploy, tag_project
from publish import update_staticfiles, upload_staticfiles
from settings import PROJECT_ENVIRONMENT, PROJECT_ROOT, SITE_NAME, STATIC_ROOT
@task(default=True)
def info():
"""Display information about the project configuration."""
puts("Django project for site '%s' located at '%s':" % (SITE_NAME, PROJECT_ROOT))
puts(indent('PROJECT_ENVIRONMENT = %s' % PROJECT_ENVIRONMENT, 4))
puts(indent('DJANGO_SETTINGS_MODULE = %s'
% os.environ.get('DJANGO_SETTINGS_MODULE', ''), 4))
puts(indent('STATIC_ROOT = %s' % STATIC_ROOT, 4))
@task
def publish():
"""Publish assets to Amazon S3."""
update_staticfiles()
upload_staticfiles()
@task
def deploy():
"""Publish and deploy the site."""
prepare_to_deploy()
publish()
# TODO: Add support for other environments.
tag_project('production')
deploy_to_heroku()
| """Django project fabfile."""
import os
from fabric.api import puts, task
from fabric.utils import indent
from deploy import deploy_to_heroku, prepare_to_deploy, tag_project
from publish import update_staticfiles, upload_staticfiles
from settings import PROJECT_ENVIRONMENT, PROJECT_ROOT, SITE_NAME, STATIC_ROOT
@task(default=True)
def info():
"""Display information about the project configuration."""
puts("Django project for site '%s' located at '%s':" % (SITE_NAME, PROJECT_ROOT))
puts(indent('PROJECT_ENVIRONMENT = %s' % PROJECT_ENVIRONMENT, 4))
puts(indent('DJANGO_SETTINGS_MODULE = %s'
% os.environ.get('DJANGO_SETTINGS_MODULE', ''), 4))
puts(indent('STATIC_ROOT = %s' % STATIC_ROOT, 4))
@task
def publish():
"""Publish assets to Amazon S3."""
update_staticfiles()
upload_staticfiles()
@task
def deploy(skip_publish=False):
"""Publish and deploy the site."""
prepare_to_deploy()
if not skip_publish:
publish()
# TODO: Add support for other environments.
tag_project('production')
deploy_to_heroku()
| Add a skip_publish flag to the deploy task. | Add a skip_publish flag to the deploy task.
| Python | mit | PrecisionMojo/pm-www,PrecisionMojo/pm-www | ---
+++
@@ -27,10 +27,11 @@
@task
-def deploy():
+def deploy(skip_publish=False):
"""Publish and deploy the site."""
prepare_to_deploy()
- publish()
+ if not skip_publish:
+ publish()
# TODO: Add support for other environments.
tag_project('production')
deploy_to_heroku() |
e0055226a9d10d726fcde948ea72ac2e84f8109a | falafel/__init__.py | falafel/__init__.py | import os
from .core import LogFileOutput, Mapper, IniConfigFile, LegacyItemAccess # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.11.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
RULES_STATUS = {}
"""
Mapping of dictionaries containing nvr and commitid for each rule repo included
in this instance
{"rule_repo_1": {"version": nvr(), "commit": sha1}}
"""
def add_status(name, nvr, commit):
"""
Rule repositories should call this method in their package __init__ to
register their version information.
"""
RULES_STATUS[name] = {"version": nvr, "commit": commit}
| import os
from .core import LogFileOutput, Mapper, IniConfigFile, LegacyItemAccess # noqa: F401
from .core import FileListing # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401
__here__ = os.path.dirname(os.path.abspath(__file__))
VERSION = "1.11.0"
NAME = "falafel"
with open(os.path.join(__here__, "RELEASE")) as f:
RELEASE = f.read().strip()
with open(os.path.join(__here__, "COMMIT")) as f:
COMMIT = f.read().strip()
def get_nvr():
return "{0}-{1}-{2}".format(NAME, VERSION, RELEASE)
RULES_STATUS = {}
"""
Mapping of dictionaries containing nvr and commitid for each rule repo included
in this instance
{"rule_repo_1": {"version": nvr(), "commit": sha1}}
"""
def add_status(name, nvr, commit):
"""
Rule repositories should call this method in their package __init__ to
register their version information.
"""
RULES_STATUS[name] = {"version": nvr, "commit": commit}
| Add FileListing to objects we export | Add FileListing to objects we export
| Python | apache-2.0 | RedHatInsights/insights-core,RedHatInsights/insights-core | ---
+++
@@ -1,5 +1,6 @@
import os
from .core import LogFileOutput, Mapper, IniConfigFile, LegacyItemAccess # noqa: F401
+from .core import FileListing # noqa: F401
from .core.plugins import mapper, reducer, make_response, make_metadata # noqa: F401
from .mappers import get_active_lines # noqa: F401
from .util import defaults, parse_table # noqa: F401 |
25f2692d5865a76533f8087c2f566299ae777c8e | conman/redirects/views.py | conman/redirects/views.py | from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
| from django.views.generic import RedirectView
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
permanent = True # Set to django 1.9's default to avoid RemovedInDjango19Warning
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url.
Save the route's redirect type for use by RedirectView.
"""
redirect = kwargs['route']
self.permanent = redirect.permanent
return redirect.target.url
| Set variable to future default | Set variable to future default
Deprecation warning was:
RemovedInDjango19Warning: Default value of 'RedirectView.permanent' will
change from True to False in Django 1.9. Set an explicit value to
silence this warning.
| Python | bsd-2-clause | meshy/django-conman,meshy/django-conman,Ian-Foote/django-conman | ---
+++
@@ -3,6 +3,8 @@
class RouteRedirectView(RedirectView):
"""Redirect to the target Route."""
+ permanent = True # Set to django 1.9's default to avoid RemovedInDjango19Warning
+
def get_redirect_url(self, *args, **kwargs):
"""
Return the route's target url. |
f28d1c7b1daba6a2ca4d8b2537e861d8146bc998 | src/competition/validators.py | src/competition/validators.py | from django.core.exceptions import ValidationError
from django.template.defaultfilters import slugify
import re
NAME_REGULAR_EXPRESSION = re.compile(r'^[a-zA-Z0-9]+[\w\-\.: ]*$')
def greater_than_zero(value):
"""Checks if value is greater than zero"""
if value <= 0:
raise ValidationError("Value must be greater than zero.")
def non_negative(value):
"""Checks if value is greater than or equal to zero"""
if value < 0:
raise ValidationError("Value must be greater than or equal to zero.")
def validate_name(value):
"""Ensures that a string matches a regular expression and that the
slugified version of a string is not the empty string"""
if value.lower() == "bye":
msg = "Name cannot be 'Bye'"
raise ValidationError(msg)
if NAME_REGULAR_EXPRESSION.match(value) is None:
msg = "Team name doesn't start with an alphanumeric or contains " \
"invalid characters."
raise ValidationError(msg)
if slugify(value) == "":
msg = "Name must contain some letters or numbers."
raise ValidationError(msg)
| from django.core.exceptions import ValidationError
from django.template.defaultfilters import slugify
import re
NAME_REGULAR_EXPRESSION = re.compile(r'^[a-zA-Z0-9]+[\w\-\.: ]*$')
def greater_than_zero(value):
"""Checks if value is greater than zero"""
if value <= 0:
raise ValidationError("Value must be greater than zero.")
def non_negative(value):
"""Checks if value is greater than or equal to zero"""
if value < 0:
raise ValidationError("Value must be greater than or equal to zero.")
def validate_name(value):
"""Ensures that a string matches a regular expression and that the
slugified version of a string is not the empty string"""
if value.lower().strip() == "bye":
msg = "Name cannot be 'Bye'"
raise ValidationError(msg)
if NAME_REGULAR_EXPRESSION.match(value) is None:
msg = "Team name doesn't start with an alphanumeric or contains " \
"invalid characters."
raise ValidationError(msg)
if slugify(value) == "":
msg = "Name must contain some letters or numbers."
raise ValidationError(msg)
| Remove whitespace from name when checking for bye | Remove whitespace from name when checking for bye
| Python | bsd-3-clause | michaelwisely/django-competition,michaelwisely/django-competition,michaelwisely/django-competition | ---
+++
@@ -20,7 +20,7 @@
def validate_name(value):
"""Ensures that a string matches a regular expression and that the
slugified version of a string is not the empty string"""
- if value.lower() == "bye":
+ if value.lower().strip() == "bye":
msg = "Name cannot be 'Bye'"
raise ValidationError(msg)
if NAME_REGULAR_EXPRESSION.match(value) is None: |
38e25c5219476f51ca391fcea5e629177f4dde84 | daisyproducer/documents/storage.py | daisyproducer/documents/storage.py | """
A storage implementation that overwrites exiting files in the storage
See "Writing a custom storage system"
(https://docs.djangoproject.com/en/1.3/howto/custom-file-storage/) and
the discussion on stackoverflow on "ImageField overwrite image file"
(http://stackoverflow.com/questions/9522759/imagefield-overwrite-image-file)
"""
import os.path
from django.core.files.storage import FileSystemStorage
class OverwriteStorage(FileSystemStorage):
def get_available_name(self, name):
return name
def _save(self, name, content):
full_path = self.path(name)
# make sure an existing file is replaced by removing the
# original file first
if os.path.exists(full_path):
os.remove(full_path)
return super(OverwriteStorage, self)._save(name, content)
| """
A storage implementation that overwrites exiting files in the storage
See "Writing a custom storage system"
(https://docs.djangoproject.com/en/1.3/howto/custom-file-storage/) and
the discussion on stackoverflow on "ImageField overwrite image file"
(http://stackoverflow.com/questions/9522759/imagefield-overwrite-image-file)
"""
import os.path
from django.core.files.storage import FileSystemStorage
class OverwriteStorage(FileSystemStorage):
def get_available_name(self, name, max_length=None):
return name
def _save(self, name, content):
full_path = self.path(name)
# make sure an existing file is replaced by removing the
# original file first
if os.path.exists(full_path):
os.remove(full_path)
return super(OverwriteStorage, self)._save(name, content)
| Migrate to new Storage API | Migrate to new Storage API
for Django 1.8
| Python | agpl-3.0 | sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer,sbsdev/daisyproducer | ---
+++
@@ -12,7 +12,7 @@
class OverwriteStorage(FileSystemStorage):
- def get_available_name(self, name):
+ def get_available_name(self, name, max_length=None):
return name
def _save(self, name, content): |
07d8c7be45e80741fc024b14fff8f0ae8a20c4c5 | instance/tasks.py | instance/tasks.py | # -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from huey.djhuey import crontab, db_periodic_task, task
from instance.github import get_watched_pr_list
from .models import OpenEdXInstance
# Logging #####################################################################
import logging
logger = logging.getLogger(__name__)
# Tasks #######################################################################
@task()
def provision_sandbox_instance(fork_name=None, **instance_field_dict):
logger.info('Create local instance object')
instance, _ = OpenEdXInstance.objects.get_or_create(**instance_field_dict)
if fork_name:
instance.set_fork_name(fork_name)
logger.info('Running provisioning on %s', instance)
_, log = instance.run_provisioning()
return log
@task()
def watch_pr():
pr_list = get_watched_pr_list()
# TODO: Update all PRs
pr=pr_list[0]
return provision_sandbox_instance(
sub_domain='sandbox', # TODO: set to 'pr<number>'
name=pr.name,
fork_name=pr.fork_name,
branch_name=pr.branch_name,
commit_id=pr.branch_name, # TODO: check if it needs to be updated for existing instances
)
@db_periodic_task(crontab(day='*'))
def update_instance_on_new_commit():
#for instance in Instance.objects.all():
pass
| # -*- encoding: utf-8 -*-
#
# Copyright (c) 2015, OpenCraft
#
# Imports #####################################################################
from huey.djhuey import crontab, db_periodic_task, task
from instance.github import get_watched_pr_list
from .models import OpenEdXInstance
# Logging #####################################################################
import logging
logger = logging.getLogger(__name__)
# Tasks #######################################################################
@task()
def provision_sandbox_instance(fork_name=None, **instance_field_dict):
logger.info('Create local instance object')
instance, _ = OpenEdXInstance.objects.get_or_create(**instance_field_dict)
if fork_name:
instance.set_fork_name(fork_name)
logger.info('Running provisioning on %s', instance)
_, log = instance.run_provisioning()
return log
@task()
def watch_pr():
pr_list = get_watched_pr_list()
# TODO: Update all PRs
pr=pr_list[0]
return provision_sandbox_instance(
sub_domain='pr.sandbox', # TODO: set to 'pr<number>'
name=pr.name,
fork_name=pr.fork_name,
branch_name=pr.branch_name,
commit_id=pr.branch_name, # TODO: check if it needs to be updated for existing instances
)
@db_periodic_task(crontab(day='*'))
def update_instance_on_new_commit():
#for instance in Instance.objects.all():
pass
| Update default PR sandbox subdomain to not overlap existing sandboxes | Update default PR sandbox subdomain to not overlap existing sandboxes
| Python | agpl-3.0 | omarkhan/opencraft,open-craft/opencraft,omarkhan/opencraft,omarkhan/opencraft,open-craft/opencraft,open-craft/opencraft,omarkhan/opencraft,brousch/opencraft,open-craft/opencraft,open-craft/opencraft,brousch/opencraft,brousch/opencraft | ---
+++
@@ -37,7 +37,7 @@
# TODO: Update all PRs
pr=pr_list[0]
return provision_sandbox_instance(
- sub_domain='sandbox', # TODO: set to 'pr<number>'
+ sub_domain='pr.sandbox', # TODO: set to 'pr<number>'
name=pr.name,
fork_name=pr.fork_name,
branch_name=pr.branch_name, |
7cc1a78d4fefcb216a6c8d5128d05ba9e70f5246 | jazzband/hooks.py | jazzband/hooks.py | from flask.ext.hookserver import Hooks
from .models import db, User
hooks = Hooks()
@hooks.hook('ping')
def ping(data, guid):
return 'pong'
@hooks.hook('membership')
def membership(data, guid):
if data['scope'] != 'team':
return
member = User.query.filter_by(id=data['member']['id']).first()
if member is None:
return
if data['action'] == 'added':
member.is_member = True
db.session.commit()
elif data['action'] == 'removed':
member.is_member = False
db.session.commit()
| from flask.ext.hookserver import Hooks
from .models import db, User
hooks = Hooks()
@hooks.hook('ping')
def ping(data, guid):
return 'pong'
@hooks.hook('membership')
def membership(data, guid):
if data['scope'] != 'team':
return
member = User.query.filter_by(id=data['member']['id']).first()
if member is None:
return
if data['action'] == 'added':
member.is_member = True
db.session.commit()
elif data['action'] == 'removed':
member.is_member = False
db.session.commit()
return "Thanks"
| Return a response for the membership webhook. | Return a response for the membership webhook.
| Python | mit | jazzband/website,jazzband/website,jazzband/website,jazzband/site,jazzband/jazzband-site,jazzband/site,jazzband/jazzband-site,jazzband/website | ---
+++
@@ -24,3 +24,4 @@
elif data['action'] == 'removed':
member.is_member = False
db.session.commit()
+ return "Thanks" |
acb348ae14bbbef6ea6a51d926fef975f8684fdd | gather/commands.py | gather/commands.py | from gather.organiser import NotEnoughPlayersError
def strip_help(bot):
messages = []
for regex, action in bot.actions.values():
if action.__doc__:
messages.append(action.__doc__.strip())
return messages
async def bot_help(bot, channel, author, message):
await bot.say_lines(channel, strip_help(bot))
async def add(bot, channel, author, message):
"""
- !add, !s - add yourself to the pool
"""
bot.organiser.add(channel, author)
await bot.say(channel, 'You are now signed in, {0}.'.format(author))
try:
team_one, team_two = bot.organiser.select_teams()
# TODO: Announce the game
except NotEnoughPlayersError:
pass
async def remove(bot, channel, author, message):
"""
- !remove, !so - remove yourself from the pool
"""
bot.organiser.remove(channel, author)
await bot.say(channel, 'You are now signed out, {0}.'.format(author))
| from gather.organiser import NotEnoughPlayersError
def strip_help(bot):
messages = []
for regex, action in bot.actions.values():
if action.__doc__:
messages.append(action.__doc__.strip())
return messages
async def bot_help(bot, channel, author, message):
await bot.say_lines(channel, strip_help(bot))
async def add(bot, channel, author, message):
"""
- !add, !s - add yourself to the pool
"""
bot.organiser.add(channel, author)
await bot.say(channel, 'You are now signed in, {0}.'.format(author))
try:
team_one, team_two = bot.organiser.pop_teams(channel)
# TODO: Announce the game
except NotEnoughPlayersError:
pass
async def remove(bot, channel, author, message):
"""
- !remove, !so - remove yourself from the pool
"""
bot.organiser.remove(channel, author)
await bot.say(channel, 'You are now signed out, {0}.'.format(author))
| Use right command to select teams | Use right command to select teams
| Python | mit | veryhappythings/discord-gather | ---
+++
@@ -22,7 +22,7 @@
await bot.say(channel, 'You are now signed in, {0}.'.format(author))
try:
- team_one, team_two = bot.organiser.select_teams()
+ team_one, team_two = bot.organiser.pop_teams(channel)
# TODO: Announce the game
except NotEnoughPlayersError:
pass |
6d1dcaa08969e369b25debc010d7675ec8099724 | pnnl/models/__init__.py | pnnl/models/__init__.py | import importlib
import logging
from volttron.platform.agent import utils
_log = logging.getLogger(__name__)
utils.setup_logging()
__version__ = "0.1"
__all__ = ['Model']
class Model(object):
def __init__(self, config, **kwargs):
self.model = None
config = self.store_model_config(config)
if not config:
return
base_module = "volttron.pnnl.models."
try:
model_type = config["model_type"]
except KeyError as e:
_log.exception("Missing Model Type key: {}".format(e))
raise e
_file, model_type = model_type.split(".")
module = importlib.import_module(base_module + _file)
self.model_class = getattr(module, model_type)
def get_q(self, _set, sched_index, market_index, occupied):
q = self.model.predict(_set, sched_index, market_index, occupied)
return q
def store_model_config(self, _config):
try:
config = self.vip.config.get("model")
except KeyError:
config = {}
self.vip.config.set("model", _config, send_update=False)
_config.update(config)
return _config
| import importlib
import logging
from volttron.platform.agent import utils
_log = logging.getLogger(__name__)
utils.setup_logging()
__version__ = "0.1"
__all__ = ['Model']
class Model(object):
def __init__(self, config, **kwargs):
self.model = None
config = self.store_model_config(config)
if not config:
return
base_module = "volttron.pnnl.models."
try:
model_type = config["model_type"]
except KeyError as e:
_log.exception("Missing Model Type key: {}".format(e))
raise e
_file, model_type = model_type.split(".")
module = importlib.import_module(base_module + _file)
self.model_class = getattr(module, model_type)
def get_q(self, _set, sched_index, market_index, occupied):
q = self.model.predict(_set, sched_index, market_index, occupied)
return q
def store_model_config(self, _config):
try:
config = self.vip.config.get("model")
except KeyError:
config = {}
try:
self.vip.config.set("model", _config, send_update=False)
except RuntimeError:
_log.debug("Cannot change config store on config callback!")
_config.update(config)
return _config
| Add check to verify if model is in config callback state. | Add check to verify if model is in config callback state.
| Python | bsd-3-clause | VOLTTRON/volttron-applications,VOLTTRON/volttron-applications,VOLTTRON/volttron-applications,VOLTTRON/volttron-applications,VOLTTRON/volttron-applications | ---
+++
@@ -34,7 +34,10 @@
config = self.vip.config.get("model")
except KeyError:
config = {}
+ try:
self.vip.config.set("model", _config, send_update=False)
+ except RuntimeError:
+ _log.debug("Cannot change config store on config callback!")
_config.update(config)
return _config
|
7ce8e8e217e80098ce2b6371dd6c117009843602 | pyxform/tests_v1/test_whitespace.py | pyxform/tests_v1/test_whitespace.py | from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class WhitespaceTest(PyxformTestCase):
def test_over_trim(self):
self.assertPyxformXform(
name='issue96',
md="""
| survey | | | |
| | type | label | name |
| | text | Ignored | var |
| | note | ${var} text | label |
""",
xml__contains=[
'<label><output value=" /issue96/var "/> text </label>',
])
def test_empty_label_squashing(self):
self.assertPyxformXform(
name='empty_label',
debug=True,
ss_structure={'survey': [
{ 'type':'note', 'label':'', 'name':'label' } ] },
xml__contains=[
'<label></label>',
])
| from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class WhitespaceTest(PyxformTestCase):
def test_over_trim(self):
self.assertPyxformXform(
name='issue96',
md="""
| survey | | | |
| | type | label | name |
| | text | Ignored | var |
| | note | ${var} text | label |
""",
xml__contains=[
'<label><output value=" /issue96/var "/> text </label>',
])
| Remove test that can't be fulfilled | Remove test that can't be fulfilled
| Python | bsd-2-clause | XLSForm/pyxform,XLSForm/pyxform | ---
+++
@@ -13,13 +13,3 @@
xml__contains=[
'<label><output value=" /issue96/var "/> text </label>',
])
-
- def test_empty_label_squashing(self):
- self.assertPyxformXform(
- name='empty_label',
- debug=True,
- ss_structure={'survey': [
- { 'type':'note', 'label':'', 'name':'label' } ] },
- xml__contains=[
- '<label></label>',
- ]) |
7e6de52fd77ab865164368d90f7c2cfc8369b4fe | pyinfra_cli/__main__.py | pyinfra_cli/__main__.py | import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
| import os
import signal
import sys
import click
import gevent
import pyinfra
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Set CLI mode
pyinfra.is_cli = True
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Force line buffering
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
sys.stderr = os.fdopen(sys.stderr.fileno(), 'w', 1)
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal_handler(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
if __name__ == 'pyinfra_cli.__main__':
execute_pyinfra()
| Use `gevent.signal_handler` not `gevent.signal` (gevent1.5). | Use `gevent.signal_handler` not `gevent.signal` (gevent1.5).
| Python | mit | Fizzadar/pyinfra,Fizzadar/pyinfra | ---
+++
@@ -33,7 +33,7 @@
sys.exit(0)
-gevent.signal(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
+gevent.signal_handler(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
|
f00e8bcadd3d71ed8ab797073a744f5d3d9648ea | wrapper.py | wrapper.py | #!/usr/bin/env python
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import imp
import os
def WrapperPath():
return os.path.join(os.path.dirname(__file__), 'repo')
_wrapper_module = None
def Wrapper():
global _wrapper_module
if not _wrapper_module:
_wrapper_module = imp.load_source('wrapper', WrapperPath())
return _wrapper_module
| #!/usr/bin/env python
#
# Copyright (C) 2014 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import imp
import os
def WrapperPath():
#return os.path.join(os.path.dirname(__file__), 'repo')
return os.path.join(os.path.dirname(__file__), '')
_wrapper_module = None
def Wrapper():
global _wrapper_module
if not _wrapper_module:
_wrapper_module = imp.load_source('wrapper', WrapperPath())
return _wrapper_module
| Remove redundant 'repo' in WrapperPath() | Remove redundant 'repo' in WrapperPath()
| Python | apache-2.0 | jcfrank/myrepo,jcfrank/myrepo | ---
+++
@@ -20,7 +20,8 @@
def WrapperPath():
- return os.path.join(os.path.dirname(__file__), 'repo')
+ #return os.path.join(os.path.dirname(__file__), 'repo')
+ return os.path.join(os.path.dirname(__file__), '')
_wrapper_module = None
def Wrapper(): |
20d766de4d20355303b5b423dd30bf0cd4c8ee8e | createGlyphsPDF.py | createGlyphsPDF.py | from fontTools.pens.cocoaPen import CocoaPen
# Some configuration
page_format = 'A4' # See http://drawbot.readthedocs.org/content/canvas/pages.html#size for other size-values
my_selection = CurrentFont() # May also be CurrentFont.selection or else
class RegisterGlyph(object):
def __init__(self, glyph):
self.glyph = glyph
print 'Registered glyph:', self.glyph.name
self.proportion_ratio = self.getProportionRatio()
def getProportionRatio(self):
xMin, yMin, xMax, yMax = self.glyph.box
self.w = xMax - xMin
self.h = yMax - yMin
ratio = self.w/self.h
return ratio
def drawGlyphOnNewPage(self):
newPage(page_format)
self._drawGlyph()
def _drawGlyph(self):
pen = CocoaPen(self.glyph.getParent())
self.glyph.draw(pen)
drawPath(pen.path)
for g in my_selection:
if len(g) > 0: # Ignore whitespace glyphs
glyph = RegisterGlyph(g)
glyph.drawGlyphOnNewPage()
| from fontTools.pens.cocoaPen import CocoaPen
# Some configuration
page_format = 'A4' # See http://drawbot.readthedocs.org/content/canvas/pages.html#size for other size-values
margins = (50,50,50,50) # left, top, right, bottom
my_selection = CurrentFont() # May also be CurrentFont.selection or else
# Init
size(page_format)
class RegisterGlyph(object):
def __init__(self, glyph):
self.glyph = glyph
#print 'Registered glyph:', self.glyph.name
self.proportion_ratio = self.getProportionRatio()
def getProportionRatio(self):
xMin, yMin, xMax, yMax = self.glyph.box
self.w = xMax - xMin
self.h = yMax - yMin
ratio = self.w/self.h
return ratio
def drawGlyphOnNewPage(self):
newPage()
print
self._drawGlyph()
def _drawGlyph(self):
pen = CocoaPen(self.glyph.getParent())
self.glyph.draw(pen)
drawPath(pen.path)
for g in my_selection:
if len(g) > 0: # Ignore whitespace glyphs
glyph = RegisterGlyph(g)
glyph.drawGlyphOnNewPage()
| Set size nonce at the beginning of the script | Set size nonce at the beginning of the script
| Python | mit | AlphabetType/DrawBot-Scripts | ---
+++
@@ -2,14 +2,18 @@
# Some configuration
page_format = 'A4' # See http://drawbot.readthedocs.org/content/canvas/pages.html#size for other size-values
+margins = (50,50,50,50) # left, top, right, bottom
my_selection = CurrentFont() # May also be CurrentFont.selection or else
+
+# Init
+size(page_format)
class RegisterGlyph(object):
def __init__(self, glyph):
self.glyph = glyph
- print 'Registered glyph:', self.glyph.name
+ #print 'Registered glyph:', self.glyph.name
self.proportion_ratio = self.getProportionRatio()
@@ -22,7 +26,8 @@
return ratio
def drawGlyphOnNewPage(self):
- newPage(page_format)
+ newPage()
+ print
self._drawGlyph()
def _drawGlyph(self): |
45d27a34b0254123f8eb681a90d1cecfaab0b838 | ipywidgets/_version.py | ipywidgets/_version.py | # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (7, 0, 0, 'beta', 9)
_specifier_ = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc', 'final': ''}
__version__ = '%s.%s.%s%s'%(version_info[0], version_info[1], version_info[2],
'' if version_info[3]=='final' else _specifier_[version_info[3]]+str(version_info[4]))
__protocol_version__ = '2.0.0'
__jupyter_widgets_base_version__ = '1.0.0'
__jupyter_widgets_controls_version__ = '1.0.0'
# A compatible @jupyter-widgets/html-manager npm package semver range
__html_manager_version__ = '^0.6.0'
| # Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (7, 0, 0, 'beta', 9)
_specifier_ = {'alpha': 'a', 'beta': 'b', 'candidate': 'rc', 'final': ''}
__version__ = '%s.%s.%s%s'%(version_info[0], version_info[1], version_info[2],
'' if version_info[3]=='final' else _specifier_[version_info[3]]+str(version_info[4]))
__protocol_version__ = '2.0.0'
__jupyter_widgets_base_version__ = '1.0.0'
__jupyter_widgets_controls_version__ = '1.0.0'
# A compatible @jupyter-widgets/html-manager npm package semver range
__html_manager_version__ = '^0.7.0'
| Bump the version of html-manager for the embed script. | Bump the version of html-manager for the embed script. | Python | bsd-3-clause | ipython/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets | ---
+++
@@ -13,4 +13,4 @@
__jupyter_widgets_controls_version__ = '1.0.0'
# A compatible @jupyter-widgets/html-manager npm package semver range
-__html_manager_version__ = '^0.6.0'
+__html_manager_version__ = '^0.7.0' |
46e20cb4ac6571b922968674f2127318eef68821 | common/utilities/throttling.py | common/utilities/throttling.py | from rest_framework import throttling
class ThrottlingBySession(throttling.SimpleRateThrottle):
"""
Limits the rating of facility service to only 10 per day per IP.
This rate is configurable at the DRF settings.DEFAULT_THROTTLE_RATES.
The rate will apply to both the publc user and other authenticated users.
"""
scope = 'rating'
scope_attr = 'throttle_scope'
def get_cache_key(self, request, view):
"""
Override this method in order to have an ip based cache key
for authenticated users instead of the usual user.pk based cache key.
"""
fs_identity = request.DATA.get('facility_service', None)
if fs_identity:
machine = self.get_ident(request)
ident = machine + fs_identity
if request.user.is_authenticated():
return self.cache_format % {
'scope': self.scope,
'ident': ident
}
else:
return None
| from rest_framework import throttling
class ThrottlingBySession(throttling.SimpleRateThrottle):
"""
Limits the rating of facility service to only 10 per day per IP.
This rate is configurable at the DRF settings.DEFAULT_THROTTLE_RATES.
The rate will apply to both the publc user and other authenticated users.
"""
scope = 'rating'
scope_attr = 'throttle_scope'
def get_cache_key(self, request, view):
"""
Override this method in order to have an ip based cache key
for authenticated users instead of the usual user.pk based cache key.
"""
fs_identity = request.DATA.get('facility_service', None)
if fs_identity:
machine = self.get_ident(request)
ident = machine + fs_identity
return self.cache_format % {
'scope': self.scope,
'ident': ident
}
else:
return None
| Remove is_autheticated check as by default all the endpoints are authenticated | Remove is_autheticated check as by default all the endpoints are authenticated
| Python | mit | MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api | ---
+++
@@ -20,11 +20,9 @@
if fs_identity:
machine = self.get_ident(request)
ident = machine + fs_identity
-
- if request.user.is_authenticated():
- return self.cache_format % {
- 'scope': self.scope,
- 'ident': ident
- }
+ return self.cache_format % {
+ 'scope': self.scope,
+ 'ident': ident
+ }
else:
return None |
7125760a5d824ebe4bab81d688113a877f1c78c3 | csvkit/__init__.py | csvkit/__init__.py | #!/usr/bin/env python
from csvkit.unicsv import UnicodeCSVReader, UnicodeCSVWriter
class CSVKitReader(UnicodeCSVReader):
"""
A unicode-aware CSV reader with some additional features.
"""
pass
class CSVKitWriter(UnicodeCSVWriter):
"""
A unicode-aware CSV writer with some additional features.
"""
def __init__(self, f, encoding='utf-8', line_numbers=False, **kwargs):
self.row_count = 0
self.line_numbers = line_numbers
UnicodeCSVWriter.__init__(self, f, encoding, lineterminator='\n', **kwargs)
def _append_line_number(self, row):
if self.row_count == 0:
row.insert(0, 'line_number')
else:
row.insert(0, self.row_count)
self.row_count += 1
def writerow(self, row):
if self.line_numbers:
row = list(row)
self._append_line_number(row)
# Convert embedded Mac line endings to unix style line endings so they get quoted
row = [i.replace('\r', '\n') for i in row]
UnicodeCSVWriter.writerow(self, row)
def writerows(self, rows):
for row in rows:
self.writerow(row)
| #!/usr/bin/env python
from csvkit.unicsv import UnicodeCSVReader, UnicodeCSVWriter
class CSVKitReader(UnicodeCSVReader):
"""
A unicode-aware CSV reader with some additional features.
"""
pass
class CSVKitWriter(UnicodeCSVWriter):
"""
A unicode-aware CSV writer with some additional features.
"""
def __init__(self, f, encoding='utf-8', line_numbers=False, **kwargs):
self.row_count = 0
self.line_numbers = line_numbers
UnicodeCSVWriter.__init__(self, f, encoding, lineterminator='\n', **kwargs)
def _append_line_number(self, row):
if self.row_count == 0:
row.insert(0, 'line_number')
else:
row.insert(0, self.row_count)
self.row_count += 1
def writerow(self, row):
if self.line_numbers:
row = list(row)
self._append_line_number(row)
# Convert embedded Mac line endings to unix style line endings so they get quoted
row = [i.replace('\r', '\n') if isinstance(i, basestring) else i for i in row]
UnicodeCSVWriter.writerow(self, row)
def writerows(self, rows):
for row in rows:
self.writerow(row)
| Fix serialization bug in CSVKitWriter that was breaking 4 tests. | Fix serialization bug in CSVKitWriter that was breaking 4 tests.
| Python | mit | archaeogeek/csvkit,Tabea-K/csvkit,Jobava/csvkit,kyeoh/csvkit,moradology/csvkit,tlevine/csvkit,doganmeh/csvkit,bmispelon/csvkit,reubano/csvkit,wireservice/csvkit,wjr1985/csvkit,aequitas/csvkit,barentsen/csvkit,dannguyen/csvkit,arowla/csvkit,themiurgo/csvkit,KarrieK/csvkit,haginara/csvkit,onyxfish/csvkit,metasoarous/csvkit,nriyer/csvkit,bradparks/csvkit__query_join_filter_CSV_cli,matterker/csvkit,jpalvarezf/csvkit,gepuro/csvkit,elcritch/csvkit,cypreess/csvkit,unpingco/csvkit,snuggles08/csvkit | ---
+++
@@ -31,7 +31,7 @@
self._append_line_number(row)
# Convert embedded Mac line endings to unix style line endings so they get quoted
- row = [i.replace('\r', '\n') for i in row]
+ row = [i.replace('\r', '\n') if isinstance(i, basestring) else i for i in row]
UnicodeCSVWriter.writerow(self, row)
|
3fb772c8d5958a8a93ab1cedc87c61d513b84eea | savanna/utils/crypto.py | savanna/utils/crypto.py | # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Crypto.PublicKey import RSA
from Crypto import Random
import paramiko
import StringIO
def generate_private_key(length=2048):
"""Generate RSA private key (str) with the specified length."""
rsa = RSA.generate(length, Random.new().read)
return rsa.exportKey('PEM')
def to_paramiko_private_key(pkey):
"""Convert private key (str) to paramiko-specific RSAKey object."""
return paramiko.RSAKey(file_obj=StringIO.StringIO(pkey))
def private_key_to_public_key(key):
"""Convert private key (str) to public key (str)."""
return RSA.importKey(key).exportKey('OpenSSH')
| # Copyright (c) 2013 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Crypto.PublicKey import RSA
from Crypto import Random
import paramiko
from six import StringIO
def generate_private_key(length=2048):
"""Generate RSA private key (str) with the specified length."""
rsa = RSA.generate(length, Random.new().read)
return rsa.exportKey('PEM')
def to_paramiko_private_key(pkey):
"""Convert private key (str) to paramiko-specific RSAKey object."""
return paramiko.RSAKey(file_obj=StringIO.StringIO(pkey))
def private_key_to_public_key(key):
"""Convert private key (str) to public key (str)."""
return RSA.importKey(key).exportKey('OpenSSH')
| Use six to improve python 3 compatibility. | Use six to improve python 3 compatibility.
* StringIO
Change-Id: I8471e525566a0353d9276529be4b0d0e0cbf6cd6
| Python | apache-2.0 | rnirmal/savanna,bigfootproject/sahara,openstack/sahara,tellesnobrega/storm_plugin,zhangjunli177/sahara,bigfootproject/sahara,xme1226/sahara,tellesnobrega/sahara,esikachev/sahara-backup,crobby/sahara,zhujzhuo/Sahara,citrix-openstack-build/sahara,matips/iosr-2015,keedio/sahara,matips/iosr-2015,henaras/sahara,xme1226/sahara,zhujzhuo/Sahara,keedio/sahara,esikachev/sahara-backup,zhangjunli177/sahara,henaras/sahara,crobby/sahara,esikachev/scenario,ekasitk/sahara,matips/iosr-2015,tellesnobrega/storm_plugin,mapr/sahara,esikachev/scenario,redhat-openstack/sahara,tellesnobrega/sahara,mapr/sahara,ekasitk/sahara,henaras/sahara,openstack/sahara,keedio/sahara,mapr/sahara,egafford/sahara,citrix-openstack-build/sahara,tellesnobrega/storm_plugin,egafford/sahara,ekasitk/sahara,redhat-openstack/sahara,zhujzhuo/Sahara,xme1226/sahara,redhat-openstack/sahara,esikachev/sahara-backup,bigfootproject/sahara,rnirmal/savanna,esikachev/scenario,citrix-openstack-build/sahara,zhangjunli177/sahara,crobby/sahara | ---
+++
@@ -16,7 +16,7 @@
from Crypto.PublicKey import RSA
from Crypto import Random
import paramiko
-import StringIO
+from six import StringIO
def generate_private_key(length=2048): |
01b8b80bd694a93bce9cbdd2541f3e262f95c015 | tests/test_commands.py | tests/test_commands.py | from __future__ import unicode_literals, absolute_import
import sys
import unittest
import mock
from fm import Fm
class TestFm(unittest.TestCase):
def setUp(self):
self.mock_stdin = mock.create_autospec(sys.stdin)
self.mock_stdout = mock.create_autospec(sys.stdout)
def create(self):
return Fm(stdin=self.mock_stdin, stdout=self.mock_stdout)
def _last_response(self, number_of_lines=None):
if number_of_lines is None:
return self.mock_stdout.write.call_args_list[0][0][0]
return ''.join(map(lambda c: c[0][0][0], self.mock_stdout.write.call_args_list[-number_of_lines:]))
def test_play(self):
cli = self.create()
self.assertFalse(cli.onecmd('play'))
self.assertEqual(self._last_response(), 'debug: play/p command output\n')
self.mock_stdout.reset_mock()
self.assertFalse(cli.onecmd('p'))
self.assertEqual(self._last_response(), 'debug: play/p command output\n')
self.mock_stdout.reset_mock()
if __name__ == '__main__':
unittest.main()
| from __future__ import unicode_literals, absolute_import
import sys
import unittest
import mock
import re
from fm import Fm
class TestFm(unittest.TestCase):
def setUp(self):
self.mock_stdin = mock.create_autospec(sys.stdin)
self.mock_stdout = mock.create_autospec(sys.stdout)
def create(self):
return Fm(stdin=self.mock_stdin, stdout=self.mock_stdout)
def _clear_coloring(self, text):
return re.sub(r'\033\[[0-9]{1,2}m', '', text)
def _last_response(self):
return self._clear_coloring(self.mock_stdout.write.call_args_list[0][0][0])
def test_wrong_command(self):
cli = self.create()
self.assertFalse(cli.onecmd('wrong_command'))
self.assertEqual(self._last_response(), 'Unknown command wrong_command\n')
self.mock_stdout.reset_mock()
def test_play(self):
cli = self.create()
self.assertFalse(cli.onecmd('play'))
self.assertEqual(self._last_response(), 'debug: play/p command output\n')
self.mock_stdout.reset_mock()
self.assertFalse(cli.onecmd('p'))
self.assertEqual(self._last_response(), 'debug: play/p command output\n')
self.mock_stdout.reset_mock()
def test_quit(self):
cli = self.create()
with self.assertRaises(SystemExit):
cli.onecmd('quit')
with self.assertRaises(SystemExit):
cli.onecmd('q')
if __name__ == '__main__':
unittest.main()
| Clear output from colors in test cases | Clear output from colors in test cases
| Python | bsd-3-clause | roman-kachanovsky/cmd.fm-python | ---
+++
@@ -3,6 +3,7 @@
import sys
import unittest
import mock
+import re
from fm import Fm
@@ -16,10 +17,17 @@
def create(self):
return Fm(stdin=self.mock_stdin, stdout=self.mock_stdout)
- def _last_response(self, number_of_lines=None):
- if number_of_lines is None:
- return self.mock_stdout.write.call_args_list[0][0][0]
- return ''.join(map(lambda c: c[0][0][0], self.mock_stdout.write.call_args_list[-number_of_lines:]))
+ def _clear_coloring(self, text):
+ return re.sub(r'\033\[[0-9]{1,2}m', '', text)
+
+ def _last_response(self):
+ return self._clear_coloring(self.mock_stdout.write.call_args_list[0][0][0])
+
+ def test_wrong_command(self):
+ cli = self.create()
+ self.assertFalse(cli.onecmd('wrong_command'))
+ self.assertEqual(self._last_response(), 'Unknown command wrong_command\n')
+ self.mock_stdout.reset_mock()
def test_play(self):
cli = self.create()
@@ -30,6 +38,13 @@
self.assertEqual(self._last_response(), 'debug: play/p command output\n')
self.mock_stdout.reset_mock()
+ def test_quit(self):
+ cli = self.create()
+ with self.assertRaises(SystemExit):
+ cli.onecmd('quit')
+ with self.assertRaises(SystemExit):
+ cli.onecmd('q')
+
if __name__ == '__main__':
unittest.main() |
3be0c6c18a61f35ae5804464cc0da867fd0065f5 | tests/test_ez_setup.py | tests/test_ez_setup.py | import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from ez_setup import (use_setuptools, _python_cmd, _install)
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "", "sdist",
"--formats", "zip", "--dist-dir", self.tmpdir)
zipball = os.listdir(self.tmpdir)[0]
self.zipball = os.path.join(self.tmpdir, zipball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_install(self):
def _faked(*args):
return True
ez_setup._python_cmd = _faked
_install(self.zipball)
def test_use_setuptools(self):
self.assertEqual(use_setuptools(), None)
if __name__ == '__main__':
unittest.main()
| import sys
import os
import tempfile
import unittest
import shutil
import copy
CURDIR = os.path.abspath(os.path.dirname(__file__))
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
from ez_setup import _python_cmd, _install
import ez_setup
class TestSetup(unittest.TestCase):
def urlopen(self, url):
return open(self.tarball, 'rb')
def setUp(self):
self.old_sys_path = copy.copy(sys.path)
self.cwd = os.getcwd()
self.tmpdir = tempfile.mkdtemp()
os.chdir(TOPDIR)
_python_cmd("setup.py", "-q", "egg_info", "-RDb", "", "sdist",
"--formats", "zip", "--dist-dir", self.tmpdir)
zipball = os.listdir(self.tmpdir)[0]
self.zipball = os.path.join(self.tmpdir, zipball)
from setuptools.compat import urllib2
urllib2.urlopen = self.urlopen
def tearDown(self):
shutil.rmtree(self.tmpdir)
os.chdir(self.cwd)
sys.path = copy.copy(self.old_sys_path)
def test_install(self):
def _faked(*args):
return True
ez_setup._python_cmd = _faked
_install(self.zipball)
if __name__ == '__main__':
unittest.main()
| Remove test for use_setuptools, as it fails when running under pytest because the installed version of setuptools is already present. | Remove test for use_setuptools, as it fails when running under pytest because the installed version of setuptools is already present.
| Python | mit | pypa/setuptools,pypa/setuptools,pypa/setuptools | ---
+++
@@ -9,7 +9,7 @@
TOPDIR = os.path.split(CURDIR)[0]
sys.path.insert(0, TOPDIR)
-from ez_setup import (use_setuptools, _python_cmd, _install)
+from ez_setup import _python_cmd, _install
import ez_setup
class TestSetup(unittest.TestCase):
@@ -40,8 +40,5 @@
ez_setup._python_cmd = _faked
_install(self.zipball)
- def test_use_setuptools(self):
- self.assertEqual(use_setuptools(), None)
-
if __name__ == '__main__':
unittest.main() |
d9ffd877f646b3a5c020ed823d3541135af74fef | tests/test_question.py | tests/test_question.py | from pywatson.question.question import Question
class TestQuestion:
def test___init___basic(self, questions):
question = Question(questions[0]['questionText'])
assert question.question_text == questions[0]['questionText']
def test_ask_question_basic(self, watson):
answer = watson.ask_question('What is the Labour Code?')
assert type(answer) is Answer
| from pywatson.question.evidence_request import EvidenceRequest
from pywatson.question.filter import Filter
from pywatson.question.question import Question
class TestQuestion(object):
"""Unit tests for the Question class"""
def test___init___basic(self, questions):
"""Question is constructed properly with just question_text"""
question = Question(questions[0]['questionText'])
assert question.question_text == questions[0]['questionText']
def test___init___complete(self, questions):
"""Question is constructed properly with all parameters provided"""
q = questions[1]
er = q['evidenceRequest']
evidence_request = EvidenceRequest(er['items'], er['profile'])
filters = [Filter(f['filterType'], f['filterName'], f['values']) for f in q['filters']]
question = Question(question_text=q['questionText'],
answer_assertion=q['answerAssertion'],
category=q['category'],
context=q['context'],
evidence_request=evidence_request,
filters=filters,
formatted_answer=q['formattedAnswer'],
items=q['items'],
lat=q['lat'],
passthru=q['passthru'],
synonym_list=q['synonymList'])
assert question.question_text == q['questionText']
assert question.answer_assertion == q['answerAssertion']
assert question.category == q['category']
assert question.context == q['context']
assert question.evidence_request == er
assert question.filters == filters
| Add complete question constructor test | Add complete question constructor test
| Python | mit | sherlocke/pywatson | ---
+++
@@ -1,11 +1,37 @@
+from pywatson.question.evidence_request import EvidenceRequest
+from pywatson.question.filter import Filter
from pywatson.question.question import Question
-class TestQuestion:
+class TestQuestion(object):
+ """Unit tests for the Question class"""
+
def test___init___basic(self, questions):
+ """Question is constructed properly with just question_text"""
question = Question(questions[0]['questionText'])
assert question.question_text == questions[0]['questionText']
- def test_ask_question_basic(self, watson):
- answer = watson.ask_question('What is the Labour Code?')
- assert type(answer) is Answer
+ def test___init___complete(self, questions):
+ """Question is constructed properly with all parameters provided"""
+ q = questions[1]
+ er = q['evidenceRequest']
+ evidence_request = EvidenceRequest(er['items'], er['profile'])
+ filters = [Filter(f['filterType'], f['filterName'], f['values']) for f in q['filters']]
+ question = Question(question_text=q['questionText'],
+ answer_assertion=q['answerAssertion'],
+ category=q['category'],
+ context=q['context'],
+ evidence_request=evidence_request,
+ filters=filters,
+ formatted_answer=q['formattedAnswer'],
+ items=q['items'],
+ lat=q['lat'],
+ passthru=q['passthru'],
+ synonym_list=q['synonymList'])
+
+ assert question.question_text == q['questionText']
+ assert question.answer_assertion == q['answerAssertion']
+ assert question.category == q['category']
+ assert question.context == q['context']
+ assert question.evidence_request == er
+ assert question.filters == filters |
3979e0a2ebdba434a215d5f89cc58f2737ac0111 | tomviz/python/ReinterpretSignedToUnsigned.py | tomviz/python/ReinterpretSignedToUnsigned.py | def transform_scalars(dataset):
"""Reinterpret a signed integral array type as its unsigned counterpart.
This can be used when the bytes of a data array have been interpreted as a
signed array when it should have been interpreted as an unsigned array."""
from tomviz import utils
import numpy as np
scalars = utils.get_scalars(dataset)
if scalars is None:
raise RuntimeError("No scalars found!")
dtype = scaldars.dtype
dtype = dtype.type
typeMap = {
np.int8: np.uint8,
np.int16: np.uint16,
np.int32: np.uint32
}
typeAddend = {
np.int8: 128,
np.int16: 32768,
np.int32: 2147483648
}
if dtype not in typeMap:
raise RuntimeError("Scalars are not int8, int16, or int32")
newType = typeMap[dtype]
addend = typeAddend[dtype]
newScalars = scalars.astype(dtype=newType) + addend
utils.set_scalars(dataset, newScalars)
| def transform_scalars(dataset):
"""Reinterpret a signed integral array type as its unsigned counterpart.
This can be used when the bytes of a data array have been interpreted as a
signed array when it should have been interpreted as an unsigned array."""
from tomviz import utils
import numpy as np
scalars = utils.get_scalars(dataset)
if scalars is None:
raise RuntimeError("No scalars found!")
dtype = scalars.dtype
dtype = dtype.type
typeMap = {
np.int8: np.uint8,
np.int16: np.uint16,
np.int32: np.uint32
}
typeAddend = {
np.int8: 128,
np.int16: 32768,
np.int32: 2147483648
}
if dtype not in typeMap:
raise RuntimeError("Scalars are not int8, int16, or int32")
newType = typeMap[dtype]
addend = typeAddend[dtype]
newScalars = scalars.astype(dtype=newType) + addend
utils.set_scalars(dataset, newScalars)
| Fix typo in Python script | Fix typo in Python script
| Python | bsd-3-clause | thewtex/tomviz,cjh1/tomviz,OpenChemistry/tomviz,cjh1/tomviz,thewtex/tomviz,OpenChemistry/tomviz,cryos/tomviz,OpenChemistry/tomviz,OpenChemistry/tomviz,cryos/tomviz,cjh1/tomviz,mathturtle/tomviz,mathturtle/tomviz,cryos/tomviz,mathturtle/tomviz,thewtex/tomviz | ---
+++
@@ -9,7 +9,7 @@
if scalars is None:
raise RuntimeError("No scalars found!")
- dtype = scaldars.dtype
+ dtype = scalars.dtype
dtype = dtype.type
typeMap = { |
4ade8ef1f53340a9efd053eeae1694724187c4be | scripts/poweron/DRAC.py | scripts/poweron/DRAC.py | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/usr/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | import subprocess, sys, os.path
class DRAC_NO_SUPP_PACK(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
class DRAC_POWERON_FAILED(Exception):
"""Base Exception class for all transfer plugin errors."""
def __init__(self, *args):
Exception.__init__(self, *args)
def run2(command):
run = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# Wait for the process to return
out, err = [ e.splitlines() for e in run.communicate() ]
return run.returncode, out, err
drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK()
cmd='%s -r %s -u %s -p %s serveraction powerup' % (drac_path, power_on_ip, user, password)
retcode,out,err=run2(cmd)
if(len(err)==0):
return str(True)
else:
raise DRAC_POWERON_FAILED()
def main():
if len(sys.argv)<3:
exit(0)
ip=sys.argv[1]
user=sys.argv[2]
password=sys.argv[3]
print DRAC(ip,user,password)
if __name__ == "__main__":
main() | Change path to the supplemental pack | CA-40618: Change path to the supplemental pack
Signed-off-by: Javier Alvarez-Valle <cf4c8668a0b4c5e013f594a6940d05b3d4d9ddcf@citrix.com>
| Python | lgpl-2.1 | srowe/xen-api,koushikcgit/xen-api,koushikcgit/xen-api,agimofcarmen/xen-api,anoobs/xen-api,euanh/xen-api,Frezzle/xen-api,agimofcarmen/xen-api,simonjbeaumont/xen-api,srowe/xen-api,cheng-z/xen-api,djs55/xen-api,agimofcarmen/xen-api,thomassa/xen-api,rafalmiel/xen-api,ravippandey/xen-api,rafalmiel/xen-api,vasilenkomike/xen-api,salvocambria/xen-api,robertbreker/xen-api,anoobs/xen-api,koushikcgit/xen-api,ravippandey/xen-api,Frezzle/xen-api,thomassa/xen-api,guard163/xen-api,cheng--zhang/xen-api,cheng-z/xen-api,cheng--zhang/xen-api,cheng--zhang/xen-api,thomassa/xen-api,agimofcarmen/xen-api,rafalmiel/xen-api,euanh/xen-api,salvocambria/xen-api,huizh/xen-api,srowe/xen-api,Frezzle/xen-api,huizh/xen-api,djs55/xen-api,vasilenkomike/xen-api,thomassa/xen-api,jjd27/xen-api,ravippandey/xen-api,cheng-z/xen-api,simonjbeaumont/xen-api,anoobs/xen-api,djs55/xen-api,srowe/xen-api,jjd27/xen-api,simonjbeaumont/xen-api,euanh/xen-api,cheng-z/xen-api,robertbreker/xen-api,simonjbeaumont/xen-api,thomassa/xen-api,koushikcgit/xen-api,cheng-z/xen-api,robertbreker/xen-api,huizh/xen-api,vasilenkomike/xen-api,guard163/xen-api,cheng--zhang/xen-api,guard163/xen-api,Frezzle/xen-api,simonjbeaumont/xen-api,jjd27/xen-api,ravippandey/xen-api,anoobs/xen-api,vasilenkomike/xen-api,simonjbeaumont/xen-api,salvocambria/xen-api,euanh/xen-api,Frezzle/xen-api,robertbreker/xen-api,jjd27/xen-api,djs55/xen-api,huizh/xen-api,vasilenkomike/xen-api,agimofcarmen/xen-api,rafalmiel/xen-api,cheng--zhang/xen-api,agimofcarmen/xen-api,salvocambria/xen-api,cheng--zhang/xen-api,cheng--zhang/xen-api,robertbreker/xen-api,guard163/xen-api,rafalmiel/xen-api,djs55/xen-api,rafalmiel/xen-api,anoobs/xen-api,koushikcgit/xen-api,salvocambria/xen-api,salvocambria/xen-api,euanh/xen-api,euanh/xen-api,djs55/xen-api,ravippandey/xen-api,thomassa/xen-api,srowe/xen-api,ravippandey/xen-api,robertbreker/xen-api,vasilenkomike/xen-api,guard163/xen-api,anoobs/xen-api,huizh/xen-api,huizh/xen-api,cheng-z/xen-api,jjd27/xen-api,cheng-z/xe
n-api,guard163/xen-api,jjd27/xen-api,koushikcgit/xen-api | ---
+++
@@ -18,7 +18,7 @@
return run.returncode, out, err
-drac_path='/usr/sbin/racadm'
+drac_path='/opt/dell/srvadmin/sbin/racadm'
def DRAC( power_on_ip, user, password):
if( not os.path.exists(drac_path)):
raise DRAC_NO_SUPP_PACK() |
4b06210ddcff50e9aa99a86edeb99e1a6200d5a9 | health_monitor/urls.py | health_monitor/urls.py | from django.conf.urls import url
from health_monitor import views
urlpatterns = [
url(r'^health/(?P<uid>[\w-]*)/$', views.read, name='read'),
url(r'^health/(?P<uid>[\w-]*)/update/(?P<test_name>[\w-]*)/$', views.update, name='update'),
url(r'^health/(?P<uid>[\w-]*)/history/(?P<group>[\w-]*)/$', views.history, name='history'),
]
| from django.conf.urls import url
from health_monitor import views
urlpatterns = [
url(r'^health/(?P<uid>[\d]*)/$', views.read, name='read'),
url(r'^health/(?P<uid>[\d]*)/update/(?P<test_name>[\w-]*)/$', views.update, name='update'),
url(r'^health/(?P<uid>[\d]*)/history/(?P<group>[\w-]*)/$', views.history, name='history'),
]
| Modify UID to be passed as number. | Modify UID to be passed as number.
| Python | apache-2.0 | seanchon/django-health-monitor,gracenote/django-health-monitor,gracenote/django-health-monitor,seanchon/django-health-monitor | ---
+++
@@ -3,7 +3,7 @@
urlpatterns = [
- url(r'^health/(?P<uid>[\w-]*)/$', views.read, name='read'),
- url(r'^health/(?P<uid>[\w-]*)/update/(?P<test_name>[\w-]*)/$', views.update, name='update'),
- url(r'^health/(?P<uid>[\w-]*)/history/(?P<group>[\w-]*)/$', views.history, name='history'),
+ url(r'^health/(?P<uid>[\d]*)/$', views.read, name='read'),
+ url(r'^health/(?P<uid>[\d]*)/update/(?P<test_name>[\w-]*)/$', views.update, name='update'),
+ url(r'^health/(?P<uid>[\d]*)/history/(?P<group>[\w-]*)/$', views.history, name='history'),
] |
f8d3477dc4a496d648ac6bfd8d26eeacd853200f | src/masterfile/formatters.py | src/masterfile/formatters.py | # -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2017 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
"""
This package contains functions for pretty-printing data — for example,
converting column numbers into Excel-like column identifier strings.
"""
import string
def index_to_column_id(number):
"""
Takes a zero-based index and converts it to a column identifier string
such as used in Excel. Examples:
0 => A
25 => Z
26 => AA
"""
if number < 0 or not isinstance(number, int):
raise AttributeError("index_to_column_id requires a non-negative int")
digits = string.ascii_uppercase
if number == 0:
return digits[0]
return __rec_int_to_colid(number, digits)
def __rec_int_to_colid(number, digits):
base = len(digits)
if number < 0:
return ''
return (
__rec_int_to_colid((number // base) - 1, digits) +
digits[number % base])
| # -*- coding: utf-8 -*-
# Part of the masterfile package: https://github.com/njvack/masterfile
# Copyright (c) 2017 Board of Regents of the University of Wisconsin System
# Written by Nate Vack <njvack@wisc.edu> at the Center for Healthy Minds
# at the University of Wisconsin-Madison.
# Released under MIT licence; see LICENSE at the package root.
"""
This package contains functions for pretty-printing data — for example,
converting column numbers into Excel-like column identifier strings.
"""
import string
def index_to_column_id(number):
"""
Takes a zero-based index and converts it to a column identifier string
such as used in Excel. Examples:
0 => A
25 => Z
26 => AA
"""
if number < 0 or not isinstance(number, int):
raise AttributeError("index_to_column_id requires a non-negative int")
digits = string.ascii_uppercase
parts = []
number += 1 # The algorithm works on 1-based input
while number > 0:
number, mod = divmod(number - 1, len(digits))
parts.insert(0, digits[mod])
return ''.join(parts)
| Replace index_to_column_id with iterative fx | Replace index_to_column_id with iterative fx
The recursive one was neat but a bit too clever. | Python | mit | njvack/masterfile | ---
+++
@@ -25,16 +25,9 @@
if number < 0 or not isinstance(number, int):
raise AttributeError("index_to_column_id requires a non-negative int")
digits = string.ascii_uppercase
- if number == 0:
- return digits[0]
- return __rec_int_to_colid(number, digits)
-
-
-def __rec_int_to_colid(number, digits):
- base = len(digits)
- if number < 0:
- return ''
-
- return (
- __rec_int_to_colid((number // base) - 1, digits) +
- digits[number % base])
+ parts = []
+ number += 1 # The algorithm works on 1-based input
+ while number > 0:
+ number, mod = divmod(number - 1, len(digits))
+ parts.insert(0, digits[mod])
+ return ''.join(parts) |
5e25577d067f891474c722000327026744068e88 | src/unittest/python/permission_lambda_tests.py | src/unittest/python/permission_lambda_tests.py | from unittest2 import TestCase
import simplejson as json
import boto3
from moto import mock_s3
import permission_lambda
class PermissionLambdaTests(TestCase):
def _get_permission_statements(self, client, queue_url):
""" Return a list of policy statements for given queue"""
policy_response = client.get_queue_attributes(
QueueUrl=queue_url, AttributeNames=['Policy'])
policy = policy_response['Attributes']['Policy']
return json.loads(policy)['Statement']
@mock_s3
def test_get_usofa_accountlist_from_bucket(self):
bucketname = "testbucket"
usofa_data = {
"account1": {
"id": "123456789",
"email": "user1@domain.invalid"
},
"account2": {
"id": "987654321",
"email": "user2@domain.invalid"
}
}
client = boto3.client('s3')
client.create_bucket(
Bucket=bucketname,
CreateBucketConfiguration={
'LocationConstraint': 'eu-west-1'
})
client.put_object(
Bucket=bucketname,
Key="accounts.json",
Body=json.dumps(usofa_data)
)
accountlist = permission_lambda.get_usofa_accountlist(bucketname)
accountlist.sort()
self.assertEqual(accountlist, ["123456789", "987654321"])
| from unittest2 import TestCase
import simplejson as json
import boto3
from moto import mock_s3
import permission_lambda
class PermissionLambdaTests(TestCase):
@mock_s3
def test_get_usofa_accountlist_from_bucket(self):
bucketname = "testbucket"
usofa_data = {
"account1": {
"id": "123456789",
"email": "user1@domain.invalid"
},
"account2": {
"id": "987654321",
"email": "user2@domain.invalid"
}
}
client = boto3.client('s3')
client.create_bucket(
Bucket=bucketname,
CreateBucketConfiguration={
'LocationConstraint': 'eu-west-1'
})
client.put_object(
Bucket=bucketname,
Key="accounts.json",
Body=json.dumps(usofa_data)
)
accountlist = permission_lambda.get_usofa_accountlist(bucketname)
accountlist.sort()
self.assertEqual(accountlist, ["123456789", "987654321"])
| Remove Unittests done as integrationtests, due to NotImplementedErrors from moto | PIO-129: Remove Unittests done as integrationtests, due to NotImplementedErrors from moto
| Python | apache-2.0 | ImmobilienScout24/aws-set-sqs-permission-lambda | ---
+++
@@ -6,13 +6,6 @@
class PermissionLambdaTests(TestCase):
- def _get_permission_statements(self, client, queue_url):
- """ Return a list of policy statements for given queue"""
- policy_response = client.get_queue_attributes(
- QueueUrl=queue_url, AttributeNames=['Policy'])
- policy = policy_response['Attributes']['Policy']
- return json.loads(policy)['Statement']
-
@mock_s3
def test_get_usofa_accountlist_from_bucket(self):
bucketname = "testbucket" |
5fd04a337dd6fec1afc9bf53b437cce01e4fef9e | myDevices/os/threadpool.py | myDevices/os/threadpool.py | from concurrent.futures import ThreadPoolExecutor
from myDevices.utils.singleton import Singleton
import inspect
executor = ThreadPoolExecutor(max_workers=4)
class ThreadPool(Singleton):
def Submit(something):
future = executor.submit(something)
def SubmitParam(*arg):
executor.submit(*arg)
def Shutdown():
executor.shutdown() | """
This module provides a singleton thread pool class
"""
from concurrent.futures import ThreadPoolExecutor
from myDevices.utils.singleton import Singleton
executor = ThreadPoolExecutor(max_workers=4)
class ThreadPool(Singleton):
"""Singleton thread pool class"""
@staticmethod
def Submit(func):
"""Submit a function for the thread pool to run"""
executor.submit(func)
@staticmethod
def Shutdown():
"""Shutdown the thread pool"""
executor.shutdown()
| Clean up thread pool code. | Clean up thread pool code.
| Python | mit | myDevicesIoT/Cayenne-Agent,myDevicesIoT/Cayenne-Agent | ---
+++
@@ -1,12 +1,20 @@
-from concurrent.futures import ThreadPoolExecutor
+"""
+This module provides a singleton thread pool class
+"""
+from concurrent.futures import ThreadPoolExecutor
from myDevices.utils.singleton import Singleton
-import inspect
executor = ThreadPoolExecutor(max_workers=4)
class ThreadPool(Singleton):
- def Submit(something):
- future = executor.submit(something)
- def SubmitParam(*arg):
- executor.submit(*arg)
- def Shutdown():
- executor.shutdown()
+ """Singleton thread pool class"""
+
+ @staticmethod
+ def Submit(func):
+ """Submit a function for the thread pool to run"""
+ executor.submit(func)
+
+ @staticmethod
+ def Shutdown():
+ """Shutdown the thread pool"""
+ executor.shutdown()
+ |
43776062a15640c62d94d8a9c19edadb21a773be | lab/trace_sample.py | lab/trace_sample.py | import os, sys
global nest
nest = 0
def trace(frame, event, arg):
#if event == 'line':
global nest
print "%s%s %s %d (%r)" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
arg
)
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
sys.settrace(trace)
import sample
#import littleclass
| import os, sys
global nest
nest = 0
def trace(frame, event, arg):
#if event == 'line':
global nest
print "%s%s %s %d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
)
if event == 'call':
nest += 1
if event == 'return':
nest -= 1
return trace
sys.settrace(trace)
import sample
#import littleclass
| Remove a little noise from this lab tool | Remove a little noise from this lab tool
| Python | apache-2.0 | hugovk/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,7WebPages/coveragepy,hugovk/coveragepy,nedbat/coveragepy,7WebPages/coveragepy,nedbat/coveragepy,nedbat/coveragepy,hugovk/coveragepy,blueyed/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,blueyed/coveragepy,7WebPages/coveragepy,nedbat/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,hugovk/coveragepy,hugovk/coveragepy,larsbutler/coveragepy,blueyed/coveragepy,jayhetee/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,larsbutler/coveragepy | ---
+++
@@ -7,12 +7,11 @@
#if event == 'line':
global nest
- print "%s%s %s %d (%r)" % (
+ print "%s%s %s %d" % (
" " * nest,
event,
os.path.basename(frame.f_code.co_filename),
frame.f_lineno,
- arg
)
if event == 'call': |
1028e1a6e15af5a20caedd5598cebc49c5486f64 | scripts/iepy_runner.py | scripts/iepy_runner.py | """
Run IEPY core loop
Usage:
iepy_runner.py <dbname> <seeds_file>
iepy_runner.py -h | --help | --version
Options:
-h --help Show this screen
--version Version number
"""
import pprint
from docopt import docopt
from iepy.core import BootstrappedIEPipeline
from iepy import db
from iepy.human_validation import TerminalInterviewer
from iepy.utils import load_facts_from_csv, save_labeled_evidence_to_csv
if __name__ == '__main__':
opts = docopt(__doc__, version=0.1)
connection = db.connect(opts['<dbname>'])
seed_facts = load_facts_from_csv(opts['<seeds_file>'])
p = BootstrappedIEPipeline(connection, seed_facts)
STOP = 'STOP'
p.start() # blocking
keep_looping = True
while keep_looping:
qs = list(p.questions_available())
if not qs:
keep_looping = False
term = TerminalInterviewer(qs, p.add_answer, [(STOP, 'Stop execution ASAP')])
result = term()
if result == STOP:
keep_looping = False
p.force_process()
facts = p.known_facts() # profit
save_labeled_evidence_to_csv(facts.items(), "facts.csv")
| """
Run IEPY core loop
Usage:
iepy_runner.py <dbname> <seeds_file> <output_file>
iepy_runner.py -h | --help | --version
Options:
-h --help Show this screen
--version Version number
"""
import pprint
from docopt import docopt
from iepy.core import BootstrappedIEPipeline
from iepy import db
from iepy.human_validation import TerminalInterviewer
from iepy.utils import load_facts_from_csv, save_labeled_evidence_to_csv
if __name__ == '__main__':
opts = docopt(__doc__, version=0.1)
connection = db.connect(opts['<dbname>'])
seed_facts = load_facts_from_csv(opts['<seeds_file>'])
output_file = load_facts_from_csv(opts['<output_file>'])
p = BootstrappedIEPipeline(connection, seed_facts)
STOP = 'STOP'
p.start() # blocking
keep_looping = True
while keep_looping:
qs = list(p.questions_available())
if not qs:
keep_looping = False
term = TerminalInterviewer(qs, p.add_answer, [(STOP, 'Stop execution ASAP')])
result = term()
if result == STOP:
keep_looping = False
p.force_process()
facts = p.known_facts() # profit
save_labeled_evidence_to_csv(facts.items(), output_file)
| Add <output_file> parameter to the IEPY runner script. | Add <output_file> parameter to the IEPY runner script.
| Python | bsd-3-clause | mrshu/iepy,machinalis/iepy,machinalis/iepy,mrshu/iepy,machinalis/iepy,mrshu/iepy | ---
+++
@@ -2,7 +2,7 @@
Run IEPY core loop
Usage:
- iepy_runner.py <dbname> <seeds_file>
+ iepy_runner.py <dbname> <seeds_file> <output_file>
iepy_runner.py -h | --help | --version
Options:
@@ -22,6 +22,7 @@
opts = docopt(__doc__, version=0.1)
connection = db.connect(opts['<dbname>'])
seed_facts = load_facts_from_csv(opts['<seeds_file>'])
+ output_file = load_facts_from_csv(opts['<output_file>'])
p = BootstrappedIEPipeline(connection, seed_facts)
STOP = 'STOP'
@@ -38,4 +39,4 @@
keep_looping = False
p.force_process()
facts = p.known_facts() # profit
- save_labeled_evidence_to_csv(facts.items(), "facts.csv")
+ save_labeled_evidence_to_csv(facts.items(), output_file) |
a79bb92e976eee795cb3118b3a03f5f0df11e14d | deepchem/feat/__init__.py | deepchem/feat/__init__.py | """
Making it easy to import in classes.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = "Bharath Ramsundar"
__copyright__ = "Copyright 2016, Stanford University"
__license__ = "GPL"
from deepchem.feat.base_classes import Featurizer
from deepchem.feat.base_classes import ComplexFeaturizer
from deepchem.feat.base_classes import UserDefinedFeaturizer
from deepchem.feat.graph_features import ConvMolFeaturizer
from deepchem.feat.fingerprints import CircularFingerprint
from deepchem.feat.basic import RDKitDescriptors
from deepchem.feat.coulomb_matrices import CoulombMatrixEig
from deepchem.feat.grid_featurizer import GridFeaturizer
from deepchem.feat.nnscore_utils import hydrogenate_and_compute_partial_charges
from deepchem.feat.binding_pocket_features import BindingPocketFeaturizer
| """
Making it easy to import in classes.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
__author__ = "Bharath Ramsundar"
__copyright__ = "Copyright 2016, Stanford University"
__license__ = "GPL"
from deepchem.feat.base_classes import Featurizer
from deepchem.feat.base_classes import ComplexFeaturizer
from deepchem.feat.base_classes import UserDefinedFeaturizer
from deepchem.feat.graph_features import ConvMolFeaturizer
from deepchem.feat.fingerprints import CircularFingerprint
from deepchem.feat.basic import RDKitDescriptors
from deepchem.feat.coulomb_matrices import CoulombMatrix
from deepchem.feat.coulomb_matrices import CoulombMatrixEig
from deepchem.feat.grid_featurizer import GridFeaturizer
from deepchem.feat.nnscore_utils import hydrogenate_and_compute_partial_charges
from deepchem.feat.binding_pocket_features import BindingPocketFeaturizer
| Add CoulombMatrix to dc.feat import | Add CoulombMatrix to dc.feat import
| Python | mit | ktaneishi/deepchem,ktaneishi/deepchem,ktaneishi/deepchem,miaecle/deepchem,joegomes/deepchem,peastman/deepchem,miaecle/deepchem,rbharath/deepchem,miaecle/deepchem,Agent007/deepchem,rbharath/deepchem,peastman/deepchem,deepchem/deepchem,lilleswing/deepchem,joegomes/deepchem,Agent007/deepchem,deepchem/deepchem,Agent007/deepchem,lilleswing/deepchem,lilleswing/deepchem | ---
+++
@@ -15,6 +15,7 @@
from deepchem.feat.graph_features import ConvMolFeaturizer
from deepchem.feat.fingerprints import CircularFingerprint
from deepchem.feat.basic import RDKitDescriptors
+from deepchem.feat.coulomb_matrices import CoulombMatrix
from deepchem.feat.coulomb_matrices import CoulombMatrixEig
from deepchem.feat.grid_featurizer import GridFeaturizer
from deepchem.feat.nnscore_utils import hydrogenate_and_compute_partial_charges |
a65eea671a605bdeef70160e2d967a59888fcd1b | molml/features.py | molml/features.py | from .atom import * # NOQA
from .molecule import * # NOQA
| from .atom import * # NOQA
from .molecule import * # NOQA
from .crystal import * # NOQA
from .kernel import * # NOQA
| Add default import for crystal and kernel | Add default import for crystal and kernel
| Python | mit | crcollins/molml | ---
+++
@@ -1,2 +1,4 @@
from .atom import * # NOQA
from .molecule import * # NOQA
+from .crystal import * # NOQA
+from .kernel import * # NOQA |
84bf7bd05b683890e817efb6d26f7176cf555f04 | rapydo/do/__main__.py | rapydo/do/__main__.py | # -*- coding: utf-8 -*-
"""
Command line script main
"""
from rapydo.do.app import Application
import better_exceptions as be
def main():
be # activate better exceptions
Application()
if __name__ == '__main__':
main()
| # -*- coding: utf-8 -*-
"""
Command line script: main function
"""
import better_exceptions as be
from rapydo.do.app import Application
from rapydo.utils.logs import get_logger
log = get_logger(__name__)
def main():
be # activate better exceptions
try:
Application()
except KeyboardInterrupt:
log.critical("Interrupted by the user")
else:
log.verbose("Application completed")
if __name__ == '__main__':
main()
| Handle nicely keyboard interruption (ctrl+c) | Handle nicely keyboard interruption (ctrl+c)
| Python | mit | rapydo/do | ---
+++
@@ -1,15 +1,24 @@
# -*- coding: utf-8 -*-
+
"""
- Command line script main
+ Command line script: main function
"""
+import better_exceptions as be
from rapydo.do.app import Application
-import better_exceptions as be
+from rapydo.utils.logs import get_logger
+
+log = get_logger(__name__)
def main():
be # activate better exceptions
- Application()
+ try:
+ Application()
+ except KeyboardInterrupt:
+ log.critical("Interrupted by the user")
+ else:
+ log.verbose("Application completed")
if __name__ == '__main__': |
fe974197217eff350f1dc0bc5687c83066d6dd34 | kaggle_tools/features_engineering/dates_engineering.py | kaggle_tools/features_engineering/dates_engineering.py | import pandas as pd
def date_features(input_df, datetime_column='tms_gmt'):
"""
Given a datetime column, extracts useful date information
(minute, hour, dow...)
"""
df = input_df.copy()
return (df.set_index(time_column)
.assign(minute=lambda df: df.index.minute,
hour=lambda df: df.index.hour,
day=lambda df: df.index.day,
dow=lambda df: df.index.dayofweek,
month=lambda df: df.index.month,
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
| import pandas as pd
import pytz
def date_features(input_df, datetime_column='tms_gmt'):
"""
Given a datetime column, extracts useful date information
(minute, hour, dow...)
"""
df = input_df.copy()
return (df.set_index(time_column)
.assign(minute=lambda df: df.index.minute,
hour=lambda df: df.index.hour,
day=lambda df: df.index.day,
dow=lambda df: df.index.dayofweek,
month=lambda df: df.index.month,
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
def localize_datetime(input_df, timezone='Europe/Paris',
datetime_column='tms_gmt'):
"""
Convert datetime column from UTC to another timezone.
"""
tmz = pytz.timezone(timezone)
df = input_df.copy()
return (df.set_index(datetime_column)
.tz_localize(pytz.utc) # UTC time
.tz_convert(tmz)) # Timezone time
| Add a datetime localization function | Add a datetime localization function
| Python | mit | yassineAlouini/kaggle-tools,yassineAlouini/kaggle-tools | ---
+++
@@ -1,4 +1,5 @@
import pandas as pd
+import pytz
def date_features(input_df, datetime_column='tms_gmt'):
@@ -17,3 +18,15 @@
week=lambda df: df.index.week,
woy=lambda df: df.index.weekofyear,
year=lambda df: df.index.year))
+
+
+def localize_datetime(input_df, timezone='Europe/Paris',
+ datetime_column='tms_gmt'):
+ """
+ Convert datetime column from UTC to another timezone.
+ """
+ tmz = pytz.timezone(timezone)
+ df = input_df.copy()
+ return (df.set_index(datetime_column)
+ .tz_localize(pytz.utc) # UTC time
+ .tz_convert(tmz)) # Timezone time |
cdb10489382144f77dbe720f230ae92020ffb66c | messaging/test/test_message.py | messaging/test/test_message.py | """Tests the message framework."""
import threading
import time
import unittest
from messaging.message_consumer import consume_messages
from messaging.message_producer import MessageProducer
class TestMessage(unittest.TestCase):
"""Tests the message framework."""
EXCHANGE = 'test'
def setUp(self):
self.message = None
def test_1_producer_1_consumer(self):
"""Test single producer single consumer."""
mp = MessageProducer(self.EXCHANGE)
def save_message(x):
self.message = x
def consume():
"""Function to consume messages."""
consume_messages(self.EXCHANGE, save_message)
consumer = threading.Thread(target=consume)
consumer.start()
# TODO(2016-07-10) Fix this race condition. It looks like if I send the
# message before the receiver has set up, the messages are never queued
# or something.
time.sleep(0.05)
self.assertIs(self.message, None)
sent_message = 'banana'
mp.publish(sent_message)
mp.publish('QUIT')
mp.kill()
consumer.join()
self.assertEqual(self.message, bytes(sent_message, 'utf-8'))
if __name__ == '__main__':
unittest.main()
| """Tests the message framework."""
import threading
import time
import unittest
from messaging.message_consumer import consume_messages
from messaging.message_producer import MessageProducer
class TestMessage(unittest.TestCase):
"""Tests the message framework."""
EXCHANGE = 'test'
def setUp(self):
self.message = None
def test_1_producer_1_consumer(self):
"""Test single producer single consumer."""
mp = MessageProducer(self.EXCHANGE)
def save_message(x):
self.message = x
def consume():
"""Function to consume messages."""
consume_messages(self.EXCHANGE, save_message)
consumer = threading.Thread(target=consume)
consumer.start()
# Give the receiver some time to set up, see comment below
time.sleep(0.05)
self.assertIs(self.message, None)
sent_message = 'banana'
mp.publish(sent_message)
mp.publish('QUIT')
for _ in range(10):
# Because of a race condition, if the message is sent before the
# receiver has set up, the messages are never queued or something.
# Keep resending until the thread exits.
consumer.join(0.05)
if consumer.is_alive():
mp.publish(sent_message)
mp.publish('QUIT')
consumer.join(0.05)
self.assertFalse(consumer.is_alive())
mp.kill()
self.assertEqual(self.message, bytes(sent_message, 'utf-8'))
if __name__ == '__main__':
unittest.main()
| Make messaging test more reliable | Make messaging test more reliable
| Python | mit | bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc | ---
+++
@@ -30,16 +30,24 @@
consumer = threading.Thread(target=consume)
consumer.start()
- # TODO(2016-07-10) Fix this race condition. It looks like if I send the
- # message before the receiver has set up, the messages are never queued
- # or something.
+ # Give the receiver some time to set up, see comment below
time.sleep(0.05)
self.assertIs(self.message, None)
sent_message = 'banana'
mp.publish(sent_message)
mp.publish('QUIT')
+ for _ in range(10):
+ # Because of a race condition, if the message is sent before the
+ # receiver has set up, the messages are never queued or something.
+ # Keep resending until the thread exits.
+ consumer.join(0.05)
+ if consumer.is_alive():
+ mp.publish(sent_message)
+ mp.publish('QUIT')
+
+ consumer.join(0.05)
+ self.assertFalse(consumer.is_alive())
mp.kill()
- consumer.join()
self.assertEqual(self.message, bytes(sent_message, 'utf-8'))
|
41d379fcb1e3d1828e7898045cca0505cb47ae61 | xgds_data/defaultSettings.py | xgds_data/defaultSettings.py | # __BEGIN_LICENSE__
# Copyright (C) 2008-2010 United States Government as represented by
# the Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# __END_LICENSE__
"""
This app may define some new parameters that can be modified in the
Django settings module. Let's say one such parameter is XGDS_DATA_FOO.
The default value for XGDS_DATA_FOO is defined in this file, like this:
XGDS_DATA_FOO = 'my default value'
If the admin for the site doesn't like the default value, they can
override it in the site-level settings module, like this:
XGDS_DATA_FOO = 'a better value'
Other modules can access the value of FOO like this:
from xgds_data import settings
print settings.XGDS_DATA_FOO
Don't try to get the value of XGDS_DATA_FOO from django.conf.settings.
That settings object will not know about the default value!
"""
# choose models to support in siteSettings.py. mostly obsolete.
XGDS_DATA_SEARCH_MODELS = ()
# choose django apps not to list for search purposes
XGDS_DATA_SEARCH_SKIP_APP_PATTERNS = (
r'^django\..*',
r'^geocam.*',
r'^pipeline$',
)
XGDS_DATA_LOG_ENABLED = False | # __BEGIN_LICENSE__
# Copyright (C) 2008-2010 United States Government as represented by
# the Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# __END_LICENSE__
"""
This app may define some new parameters that can be modified in the
Django settings module. Let's say one such parameter is XGDS_DATA_FOO.
The default value for XGDS_DATA_FOO is defined in this file, like this:
XGDS_DATA_FOO = 'my default value'
If the admin for the site doesn't like the default value, they can
override it in the site-level settings module, like this:
XGDS_DATA_FOO = 'a better value'
Other modules can access the value of FOO like this:
from xgds_data import settings
print settings.XGDS_DATA_FOO
Don't try to get the value of XGDS_DATA_FOO from django.conf.settings.
That settings object will not know about the default value!
"""
# choose models to support in siteSettings.py. mostly obsolete.
XGDS_DATA_SEARCH_MODELS = ()
# choose django apps not to list for search purposes
XGDS_DATA_SEARCH_SKIP_APP_PATTERNS = (
r'^django\..*',
r'^geocam.*',
r'^pipeline$',
)
# XGDS_DATA_LOG_ENABLED = False
| Move log enabling out of submodule | Move log enabling out of submodule
| Python | apache-2.0 | xgds/xgds_data,xgds/xgds_data | ---
+++
@@ -35,4 +35,4 @@
r'^pipeline$',
)
-XGDS_DATA_LOG_ENABLED = False
+# XGDS_DATA_LOG_ENABLED = False |
98df7851355f81f8b6119781c003dc2f84b4dc85 | django-ssheepdog/setup.py | django-ssheepdog/setup.py | from setuptools import setup
description = 'ssheepdog'
long_desc = description
setup(
name='ssheepdog',
version='0.0.1',
url='https://github.com/SheepDogInc/ssheepdog',
install_requires=[
'django-celery',
'django-kombu',
],
description=description,
long_description=long_desc,
author='SheepDogInc',
author_email='info@sheepdoginc.ca',
packages=['ssheepdog']
)
| from setuptools import setup
description = 'ssheepdog'
long_desc = description
setup(
name='ssheepdog',
version='0.0.1',
url='https://github.com/SheepDogInc/ssheepdog',
install_requires=[
'django-celery',
'django-kombu',
'south',
'ssh'
],
description=description,
long_description=long_desc,
author='SheepDogInc',
author_email='info@sheepdoginc.ca',
packages=['ssheepdog']
)
| Add a couple of dependencies | Add a couple of dependencies
| Python | bsd-3-clause | SheepDogInc/ssheepdog,SheepDogInc/ssheepdog,SheepDogInc/ssheepdog | ---
+++
@@ -10,6 +10,8 @@
install_requires=[
'django-celery',
'django-kombu',
+ 'south',
+ 'ssh'
],
description=description,
long_description=long_desc, |
3a781fe4b579af41b2c4f0cb7ef30a36085152ed | djconnectwise/__init__.py | djconnectwise/__init__.py | # -*- coding: utf-8 -*-
VERSION = (0, 0, 92, 'alpha')
# pragma: no cover
if VERSION[-1] != "final":
__version__ = '.'.join(map(str, VERSION))
else:
# pragma: no cover
__version__ = '.'.join(map(str, VERSION[:-1]))
| # -*- coding: utf-8 -*-
VERSION = (0, 0, 93, 'alpha')
# pragma: no cover
if VERSION[-1] != "final":
__version__ = '.'.join(map(str, VERSION))
else:
# pragma: no cover
__version__ = '.'.join(map(str, VERSION[:-1]))
| Update to 0.0.93, because PyPI didn't seem to let anyone download 0.0.92 even though it uploaded fine. | Update to 0.0.93, because PyPI didn't seem to let anyone download 0.0.92 even though it uploaded fine.
| Python | mit | KerkhoffTechnologies/django-connectwise,KerkhoffTechnologies/django-connectwise | ---
+++
@@ -1,5 +1,5 @@
# -*- coding: utf-8 -*-
-VERSION = (0, 0, 92, 'alpha')
+VERSION = (0, 0, 93, 'alpha')
# pragma: no cover
if VERSION[-1] != "final": |
8b7d8919afa12f33bed096f49780914f8878163f | brew/__init__.py | brew/__init__.py | from flask import Flask
from flask.ext.pymongo import PyMongo
from flask.ext.babel import Babel
from flask.ext.cache import Cache
from brew.io import TemperatureController
from brew.state import Machine
app = Flask(__name__)
app.config.from_object('brew.settings')
babel = Babel(app)
cache = Cache(app)
mongo = PyMongo(app)
controller = TemperatureController(app)
machine = Machine(app, controller)
import brew.views
import brew.rest
| from flask import Flask
from flask.ext.pymongo import PyMongo
from flask.ext.babel import Babel
from flask.ext.cache import Cache
from brew.io import TemperatureController
from brew.state import Machine
app = Flask(__name__)
app.config.from_object('brew.settings')
app.config.from_pyfile('brewmeister.cfg', silent=True)
babel = Babel(app)
cache = Cache(app)
mongo = PyMongo(app)
controller = TemperatureController(app)
machine = Machine(app, controller)
import brew.views
import brew.rest
| Allow loading settings from brewmeister.cfg | Allow loading settings from brewmeister.cfg
| Python | mit | brewpeople/brewmeister,brewpeople/brewmeister,brewpeople/brewmeister | ---
+++
@@ -8,6 +8,7 @@
app = Flask(__name__)
app.config.from_object('brew.settings')
+app.config.from_pyfile('brewmeister.cfg', silent=True)
babel = Babel(app)
cache = Cache(app) |
e64acbdf756839adf28320641124ee696d38be54 | crispy_forms/tests/runtests.py | crispy_forms/tests/runtests.py | #!/usr/bin/env python
import os
import sys
cmds = [
'python runtests_bootstrap.py',
'python runtests_bootstrap3.py',
'python runtests_uniform.py',
]
for cmd in cmds:
retval = os.system(cmd)
if retval:
sys.exit(1)
| #!/usr/bin/env python
import os
import sys
import django
if django.VERSION < (1,6):
cmds = [
'python runtests_bootstrap_legacy.py',
'python runtests_bootstrap3_legacy.py',
'python runtests_uniform_legacy.py',
]
else:
cmds = [
'python runtests_bootstrap.py',
'python runtests_bootstrap3.py',
'python runtests_uniform.py',
]
for cmd in cmds:
retval = os.system(cmd)
if retval:
sys.exit(1)
| Use old legacy runtest files for Django 1.4 | Use old legacy runtest files for Django 1.4
| Python | mit | ngenovictor/django-crispy-forms,jtyoung/django-crispy-forms,VishvajitP/django-crispy-forms,davidszotten/django-crispy-forms,jtyoung/django-crispy-forms,schrd/django-crispy-forms,django-crispy-forms/django-crispy-forms,dzhuang/django-crispy-forms,davidszotten/django-crispy-forms,ngenovictor/django-crispy-forms,RamezIssac/django-crispy-forms,IanLee1521/django-crispy-forms,RamezIssac/django-crispy-forms,scuml/django-crispy-forms,Stranger6667/django-crispy-forms,tarunlnmiit/django-crispy-forms,saydulk/django-crispy-forms,smirolo/django-crispy-forms,Stranger6667/django-crispy-forms,maraujop/django-crispy-forms,avsd/django-crispy-forms,smirolo/django-crispy-forms,saydulk/django-crispy-forms,dessibelle/django-crispy-forms,dessibelle/django-crispy-forms,scuml/django-crispy-forms,tarunlnmiit/django-crispy-forms,maraujop/django-crispy-forms,alanwj/django-crispy-forms,damienjones/django-crispy-forms,schrd/django-crispy-forms,dzhuang/django-crispy-forms,VishvajitP/django-crispy-forms,alanwj/django-crispy-forms,avsd/django-crispy-forms,impulse-cloud/django-crispy-forms,impulse-cloud/django-crispy-forms,django-crispy-forms/django-crispy-forms,IanLee1521/django-crispy-forms,damienjones/django-crispy-forms | ---
+++
@@ -2,12 +2,20 @@
import os
import sys
+import django
-cmds = [
- 'python runtests_bootstrap.py',
- 'python runtests_bootstrap3.py',
- 'python runtests_uniform.py',
-]
+if django.VERSION < (1,6):
+ cmds = [
+ 'python runtests_bootstrap_legacy.py',
+ 'python runtests_bootstrap3_legacy.py',
+ 'python runtests_uniform_legacy.py',
+ ]
+else:
+ cmds = [
+ 'python runtests_bootstrap.py',
+ 'python runtests_bootstrap3.py',
+ 'python runtests_uniform.py',
+ ]
for cmd in cmds:
retval = os.system(cmd) |
ca6d0c5f0fc61ce7d939e49f276c36c5cb12a589 | backend/globaleaks/tests/utils/test_zipstream.py | backend/globaleaks/tests/utils/test_zipstream.py | # -*- encoding: utf-8 -*-
import os
from zipfile import ZipFile
from twisted.internet.defer import inlineCallbacks
from globaleaks.db.datainit import load_appdata
from globaleaks.settings import GLSettings
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream, get_compression_opts
class TestCollection(helpers.TestGL):
files = []
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.test_collection_file = os.path.join(GLSettings.working_path, 'test.collection')
for k in self.internationalized_text:
self.files.append({'name': self.internationalized_text[k].encode('utf8'), 'buf': self.internationalized_text[k].encode('utf-8')})
def test_collection(self):
for compression in ['zipstored', 'zipdeflated']:
with open(self.test_collection_file, 'w') as f:
opts = get_compression_opts(compression)
for data in ZipStream(self.files, opts['compression_type']):
f.write(data)
with ZipFile(self.test_collection_file, 'r') as f:
self.assertIsNone(f.testzip())
| # -*- encoding: utf-8 -*-
import os
from zipfile import ZipFile
from twisted.internet.defer import inlineCallbacks
from globaleaks.db.datainit import load_appdata
from globaleaks.settings import GLSettings
from globaleaks.tests import helpers
from globaleaks.utils.zipstream import ZipStream
class TestCollection(helpers.TestGL):
files = []
@inlineCallbacks
def setUp(self):
yield helpers.TestGL.setUp(self)
self.test_collection_file = os.path.join(GLSettings.working_path, 'test.collection')
for k in self.internationalized_text:
self.files.append({'name': self.internationalized_text[k].encode('utf8'), 'buf': self.internationalized_text[k].encode('utf-8')})
def test_collection(self):
with open(self.test_collection_file, 'w') as f:
for data in ZipStream(self.files):
f.write(data)
with ZipFile(self.test_collection_file, 'r') as f:
self.assertIsNone(f.testzip())
| Simplify zipstream following the simplification of the zip routines implemented | Simplify zipstream following the simplification of the zip routines implemented
| Python | agpl-3.0 | vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks | ---
+++
@@ -8,7 +8,7 @@
from globaleaks.db.datainit import load_appdata
from globaleaks.settings import GLSettings
from globaleaks.tests import helpers
-from globaleaks.utils.zipstream import ZipStream, get_compression_opts
+from globaleaks.utils.zipstream import ZipStream
class TestCollection(helpers.TestGL):
files = []
@@ -23,11 +23,9 @@
def test_collection(self):
- for compression in ['zipstored', 'zipdeflated']:
- with open(self.test_collection_file, 'w') as f:
- opts = get_compression_opts(compression)
- for data in ZipStream(self.files, opts['compression_type']):
- f.write(data)
+ with open(self.test_collection_file, 'w') as f:
+ for data in ZipStream(self.files):
+ f.write(data)
- with ZipFile(self.test_collection_file, 'r') as f:
- self.assertIsNone(f.testzip())
+ with ZipFile(self.test_collection_file, 'r') as f:
+ self.assertIsNone(f.testzip()) |
cdb66acf92dae34d1b17c2a429738c73fb06caad | scoring/checks/ldap.py | scoring/checks/ldap.py | from __future__ import absolute_import
from config import config
import ldap
# DEFAULTS
ldap_config = {
'timeout': 5
}
# /DEFAULTS
# CONFIG
if "ldap" in config:
ldap_config.update(config["ldap"])
# /CONFIG
def check_ldap_lookup(check, data):
check.addOutput("ScoreEngine: %s Check\n" % (check.getServiceName()))
check.addOutput("EXPECTED: Sucessful and correct query against the AD (LDAP) server")
check.addOutput("OUTPUT:\n")
check.addOutput("Starting check...")
try:
# Setup LDAP
l = ldap.initialize('ldap://%s' % data["HOST"])
# Bind to the user we're using to lookup
username = data["USER"]
password = data["PASS"]
l.protocol_version = ldap.VERSION3
l.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_config["timeout"])
l.simple_bind_s(username, password)
# We're good!
check.setPassed()
check.addOutput("Check successful!")
except Exception as e:
check.addOutput("ERROR: %s: %s" % (type(e).__name__, e))
return | from __future__ import absolute_import
from config import config
import ldap
# DEFAULTS
ldap_config = {
'timeout': 5
}
# /DEFAULTS
# CONFIG
if "ldap" in config:
ldap_config.update(config["ldap"])
# /CONFIG
def check_ldap_lookup(check, data):
check.addOutput("ScoreEngine: %s Check\n" % (check.getServiceName()))
check.addOutput("EXPECTED: Sucessful and correct query against the AD (LDAP) server")
check.addOutput("OUTPUT:\n")
check.addOutput("Starting check...")
try:
# Setup LDAP
l = ldap.initialize('ldap://%s' % data["HOST"])
# Bind to the user we're using to lookup
domain = data["DOMAIN"]
username = data["USER"]
password = data["PASS"]
actual_username = "%s\%s" % (domain, username)
l.protocol_version = ldap.VERSION3
l.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_config["timeout"])
l.simple_bind_s(actual_username, password)
# We're good!
check.setPassed()
check.addOutput("Check successful!")
except Exception as e:
check.addOutput("ERROR: %s: %s" % (type(e).__name__, e))
return | Make LDAP check even better | Make LDAP check even better
| Python | mit | ubnetdef/scoreengine,ubnetdef/scoreengine | ---
+++
@@ -25,12 +25,15 @@
l = ldap.initialize('ldap://%s' % data["HOST"])
# Bind to the user we're using to lookup
+ domain = data["DOMAIN"]
username = data["USER"]
password = data["PASS"]
+ actual_username = "%s\%s" % (domain, username)
+
l.protocol_version = ldap.VERSION3
l.set_option(ldap.OPT_NETWORK_TIMEOUT, ldap_config["timeout"])
- l.simple_bind_s(username, password)
+ l.simple_bind_s(actual_username, password)
# We're good!
check.setPassed() |
7576d63bc2061074a41685c69546a4a5d57bc3fb | unihan_db/__about__.py | unihan_db/__about__.py | __title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
| __title__ = 'unihan-db'
__package_name__ = 'unihan_db'
__description__ = 'SQLAlchemy models for UNIHAN database'
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
__docs__ = 'https://unihan-db.git-pull.com'
__tracker__ = 'https://github.com/cihai/unihan-db/issues'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2017- cihai software foundation'
| Add docs and tracker to metadata | Add docs and tracker to metadata
| Python | mit | cihai/unihan-db | ---
+++
@@ -4,6 +4,8 @@
__version__ = '0.1.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/cihai/unihan-db'
+__docs__ = 'https://unihan-db.git-pull.com'
+__tracker__ = 'https://github.com/cihai/unihan-db/issues'
__pypi__ = 'https://pypi.org/project/unihan-db/'
__email__ = 'cihai@git-pull.com'
__license__ = 'MIT' |
6b97c1bdcb7d7152c7c0a14833caadbf3aa5ad04 | lms/djangoapps/coursewarehistoryextended/fields.py | lms/djangoapps/coursewarehistoryextended/fields.py | """
Custom fields for use in the coursewarehistoryextended django app.
"""
from django.db.models.fields import AutoField
class UnsignedBigIntAutoField(AutoField):
"""
An unsigned 8-byte integer for auto-incrementing primary keys.
"""
def db_type(self, connection):
if connection.settings_dict['ENGINE'] == 'django.db.backends.mysql':
return "bigint UNSIGNED AUTO_INCREMENT"
elif connection.settings_dict['ENGINE'] == 'django.db.backends.sqlite3':
# Sqlite will only auto-increment the ROWID column. Any INTEGER PRIMARY KEY column
# is an alias for that (https://www.sqlite.org/autoinc.html). An unsigned integer
# isn't an alias for ROWID, so we have to give up on the unsigned part.
return "integer"
elif connection.settings_dict['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
# Pg's bigserial is implicitly unsigned (doesn't allow negative numbers) and
# goes 1-9.2x10^18
return "BIGSERIAL"
else:
return None
| """
Custom fields for use in the coursewarehistoryextended django app.
"""
from django.db.models.fields import AutoField
class UnsignedBigIntAutoField(AutoField):
"""
An unsigned 8-byte integer for auto-incrementing primary keys.
"""
def db_type(self, connection):
if connection.settings_dict['ENGINE'] == 'django.db.backends.mysql':
return "bigint UNSIGNED AUTO_INCREMENT"
elif connection.settings_dict['ENGINE'] == 'django.db.backends.sqlite3':
# Sqlite will only auto-increment the ROWID column. Any INTEGER PRIMARY KEY column
# is an alias for that (https://www.sqlite.org/autoinc.html). An unsigned integer
# isn't an alias for ROWID, so we have to give up on the unsigned part.
return "integer"
elif connection.settings_dict['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
# Pg's bigserial is implicitly unsigned (doesn't allow negative numbers) and
# goes 1-9.2x10^18
return "BIGSERIAL"
else:
return None
def rel_db_type(self, connection):
if connection.settings_dict['ENGINE'] == 'django.db.backends.mysql':
return "bigint UNSIGNED"
elif connection.settings_dict['ENGINE'] == 'django.db.backends.sqlite3':
return "integer"
elif connection.settings_dict['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
return "BIGSERIAL"
else:
return None
| Add a rel_db_type to UnsignedBigIntAutoField | Add a rel_db_type to UnsignedBigIntAutoField
| Python | agpl-3.0 | proversity-org/edx-platform,angelapper/edx-platform,gymnasium/edx-platform,kmoocdev2/edx-platform,jolyonb/edx-platform,TeachAtTUM/edx-platform,cpennington/edx-platform,kmoocdev2/edx-platform,edx/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,ahmedaljazzar/edx-platform,kmoocdev2/edx-platform,hastexo/edx-platform,hastexo/edx-platform,hastexo/edx-platform,msegado/edx-platform,Lektorium-LLC/edx-platform,appsembler/edx-platform,BehavioralInsightsTeam/edx-platform,gymnasium/edx-platform,stvstnfrd/edx-platform,ESOedX/edx-platform,TeachAtTUM/edx-platform,kmoocdev2/edx-platform,eduNEXT/edunext-platform,pabloborrego93/edx-platform,philanthropy-u/edx-platform,teltek/edx-platform,EDUlib/edx-platform,Stanford-Online/edx-platform,procangroup/edx-platform,edx/edx-platform,Stanford-Online/edx-platform,edx-solutions/edx-platform,procangroup/edx-platform,gymnasium/edx-platform,ahmedaljazzar/edx-platform,procangroup/edx-platform,lduarte1991/edx-platform,eduNEXT/edx-platform,BehavioralInsightsTeam/edx-platform,appsembler/edx-platform,BehavioralInsightsTeam/edx-platform,ahmedaljazzar/edx-platform,a-parhom/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,gsehub/edx-platform,ahmedaljazzar/edx-platform,edx-solutions/edx-platform,mitocw/edx-platform,mitocw/edx-platform,proversity-org/edx-platform,Lektorium-LLC/edx-platform,lduarte1991/edx-platform,CredoReference/edx-platform,edx/edx-platform,CredoReference/edx-platform,teltek/edx-platform,arbrandes/edx-platform,CredoReference/edx-platform,edx/edx-platform,EDUlib/edx-platform,stvstnfrd/edx-platform,msegado/edx-platform,pabloborrego93/edx-platform,a-parhom/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,cpennington/edx-platform,Stanford-Online/edx-platform,philanthropy-u/edx-platform,Lektorium-LLC/edx-platform,proversity-org/edx-platform,jolyonb/edx-platform,edx-solutions/edx-platform,a-parhom/edx-platform,ESOedX/edx-platform,a-parhom/edx-platform,mitocw/edx-platform,TeachAtTUM/edx-platfo
rm,cpennington/edx-platform,ESOedX/edx-platform,Stanford-Online/edx-platform,appsembler/edx-platform,angelapper/edx-platform,msegado/edx-platform,CredoReference/edx-platform,philanthropy-u/edx-platform,gsehub/edx-platform,gsehub/edx-platform,lduarte1991/edx-platform,edx-solutions/edx-platform,pabloborrego93/edx-platform,jolyonb/edx-platform,stvstnfrd/edx-platform,philanthropy-u/edx-platform,pabloborrego93/edx-platform,Edraak/edraak-platform,Edraak/edraak-platform,teltek/edx-platform,eduNEXT/edx-platform,eduNEXT/edunext-platform,hastexo/edx-platform,gymnasium/edx-platform,teltek/edx-platform,EDUlib/edx-platform,TeachAtTUM/edx-platform,lduarte1991/edx-platform,gsehub/edx-platform,Lektorium-LLC/edx-platform,eduNEXT/edx-platform,ESOedX/edx-platform,msegado/edx-platform,cpennington/edx-platform,arbrandes/edx-platform,proversity-org/edx-platform,Edraak/edraak-platform,msegado/edx-platform,Edraak/edraak-platform,appsembler/edx-platform,jolyonb/edx-platform,mitocw/edx-platform,kmoocdev2/edx-platform,stvstnfrd/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,procangroup/edx-platform,BehavioralInsightsTeam/edx-platform | ---
+++
@@ -23,3 +23,13 @@
return "BIGSERIAL"
else:
return None
+
+ def rel_db_type(self, connection):
+ if connection.settings_dict['ENGINE'] == 'django.db.backends.mysql':
+ return "bigint UNSIGNED"
+ elif connection.settings_dict['ENGINE'] == 'django.db.backends.sqlite3':
+ return "integer"
+ elif connection.settings_dict['ENGINE'] == 'django.db.backends.postgresql_psycopg2':
+ return "BIGSERIAL"
+ else:
+ return None |
dd32edc5d42550c57901923d110324e7980f961f | careers/careers/urls.py | careers/careers/urls.py | from django.conf.urls import url
from . import views
from .feeds import LatestPositionsFeed
urlpatterns = [
url(r'^position/(?P<source>[\w]+)/(?P<job_id>[\w]+)$', views.PositionDetailView.as_view(),
name='careers.position'),
url(r'^$', views.HomeView.as_view(), name='careers.home'),
url(r'^feed/$', LatestPositionsFeed(), name='careers.feed'),
url(r'^listings/$', views.PositionListView.as_view(), name='careers.listings'),
]
| from django.conf.urls import url
from . import views
from .feeds import LatestPositionsFeed
urlpatterns = [
url(r'^position/(?P<source>[\w]+)/(?P<job_id>[\w]+)/$', views.PositionDetailView.as_view(),
name='careers.position'),
url(r'^$', views.HomeView.as_view(), name='careers.home'),
url(r'^feed/$', LatestPositionsFeed(), name='careers.feed'),
url(r'^listings/$', views.PositionListView.as_view(), name='careers.listings'),
]
| Add ending slash in position URLs. | Add ending slash in position URLs.
| Python | mpl-2.0 | mozilla/lumbergh,alexgibson/lumbergh,alexgibson/lumbergh,glogiotatidis/lumbergh,glogiotatidis/lumbergh,glogiotatidis/lumbergh,glogiotatidis/lumbergh,alexgibson/lumbergh,alexgibson/lumbergh,mozilla/lumbergh,mozilla/lumbergh,mozilla/lumbergh | ---
+++
@@ -4,7 +4,7 @@
from .feeds import LatestPositionsFeed
urlpatterns = [
- url(r'^position/(?P<source>[\w]+)/(?P<job_id>[\w]+)$', views.PositionDetailView.as_view(),
+ url(r'^position/(?P<source>[\w]+)/(?P<job_id>[\w]+)/$', views.PositionDetailView.as_view(),
name='careers.position'),
url(r'^$', views.HomeView.as_view(), name='careers.home'),
url(r'^feed/$', LatestPositionsFeed(), name='careers.feed'), |
7ed3ba20aae568d0c12ec361210d1189ecd534cf | lazysignup/backends.py | lazysignup/backends.py | from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
class LazySignupBackend(ModelBackend):
def authenticate(self, username=None):
users = [u for u in User.objects.filter(username=username)
if not u.has_usable_password()]
if len(users) != 1:
return None
return users[0]
def get_user(self, user_id):
# Annotate the user with our backend so it's always available,
# not just when authenticate() has been called. This will be
# used by the is_lazy_user filter.
user = super(LazySignupBackend, self).get_user(user_id)
if user:
user.backend = 'lazysignup.backends.LazySignupBackend'
return user
| from django.contrib.auth.backends import ModelBackend
from lazysignup.models import LazyUser
class LazySignupBackend(ModelBackend):
def authenticate(self, username=None):
lazy_users = LazyUser.objects.filter(
user__username=username
).select_related('user')
try:
return lazy_users[0].user
except IndexError:
return None
def get_user(self, user_id):
# Annotate the user with our backend so it's always available,
# not just when authenticate() has been called. This will be
# used by the is_lazy_user filter.
user = super(LazySignupBackend, self).get_user(user_id)
if user:
user.backend = 'lazysignup.backends.LazySignupBackend'
return user
| Remove the lazy signup backend's hard dependency on django.contrib.auth.user (and remove the inconsistency in checking for whether a user is lazy or not). | Remove the lazy signup backend's hard dependency on django.contrib.auth.user (and remove the inconsistency in checking for whether a user is lazy or not). | Python | bsd-3-clause | stefanklug/django-lazysignup,rwillmer/django-lazysignup,rwillmer/django-lazysignup,danfairs/django-lazysignup,stefanklug/django-lazysignup,danfairs/django-lazysignup | ---
+++
@@ -1,14 +1,16 @@
from django.contrib.auth.backends import ModelBackend
-from django.contrib.auth.models import User
+from lazysignup.models import LazyUser
class LazySignupBackend(ModelBackend):
def authenticate(self, username=None):
- users = [u for u in User.objects.filter(username=username)
- if not u.has_usable_password()]
- if len(users) != 1:
+ lazy_users = LazyUser.objects.filter(
+ user__username=username
+ ).select_related('user')
+ try:
+ return lazy_users[0].user
+ except IndexError:
return None
- return users[0]
def get_user(self, user_id):
# Annotate the user with our backend so it's always available,
@@ -18,3 +20,4 @@
if user:
user.backend = 'lazysignup.backends.LazySignupBackend'
return user
+ |
501bcb9aab561f9155857e1601a303374ae5698c | byceps/typing.py | byceps/typing.py | """
byceps.typing
~~~~~~~~~~~~~
BYCEPS-specific type aliases for PEP 484 type hints
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from uuid import UUID
UserID = UUID
BrandID = str
PartyID = str
| """
byceps.typing
~~~~~~~~~~~~~
BYCEPS-specific type aliases for PEP 484 type hints
:Copyright: 2006-2017 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from typing import NewType
from uuid import UUID
UserID = NewType('UserID', UUID)
BrandID = NewType('BrandID', str)
PartyID = NewType('PartyID', str)
| Make BYCEPS-specific types actual lightweight types | Make BYCEPS-specific types actual lightweight types
This makes mypy report the custom type names in error messages instead of the existing types the alias.
| Python | bsd-3-clause | homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps | ---
+++
@@ -8,11 +8,12 @@
:License: Modified BSD, see LICENSE for details.
"""
+from typing import NewType
from uuid import UUID
-UserID = UUID
+UserID = NewType('UserID', UUID)
-BrandID = str
+BrandID = NewType('BrandID', str)
-PartyID = str
+PartyID = NewType('PartyID', str) |
b66171d0b3d8cdf361f4341976d7eb0830fb38ce | dribdat/boxout/dribdat.py | dribdat/boxout/dribdat.py | """Boxout module for Dribdat projects."""
import pystache
TEMPLATE_PROJECT = r"""
<div class="onebox honeycomb">
<a href="{{link}}"
class="hexagon
{{#is_challenge}}challenge{{/is_challenge}}
{{^is_challenge}}project stage-{{progress}}{{/is_challenge}}">
<div class="hexagontent">
{{#image_url}}
<div class="hexaicon" style="background-image:url('{{image_url}}')"></div>
{{/image_url}}
</div>
</a>
<a href="{{link}}" class="title">{{name}}</a>
<div class="event-detail">
<span>{{event_name}}</span>
<i class="phase">{{phase}}</i>
</div>
<p>{{summary}}</p>
</div>
"""
def box_project(url):
"""Create a OneBox for local projects."""
project_id = url.split('/')[-1]
if not project_id:
return None
from ..user.models import Project
project = Project.query.filter_by(id=int(project_id)).first()
if not project:
return None
pd = project.data
# project.url returns a relative path
pd['link'] = url
return pystache.render(TEMPLATE_PROJECT, pd)
| """Boxout module for Dribdat projects."""
import pystache
TEMPLATE_PROJECT = r"""
<div class="onebox honeycomb">
<a href="{{link}}"
class="hexagon
{{#is_challenge}}challenge{{/is_challenge}}
{{^is_challenge}}project stage-{{progress}}{{/is_challenge}}">
<div class="hexagontent">
{{#image_url}}
<div class="hexaicon" style="background-image:url('{{image_url}}')"></div>
{{/image_url}}
</div>
</a>
<a href="{{link}}" class="title">{{name}}</a>
<div class="event-detail">
<span>{{event_name}}</span>
<i class="phase">{{phase}}</i>
</div>
<p>{{summary}}</p>
</div>
"""
def box_project(url):
"""Create a OneBox for local projects."""
project_id = url.split('/')[-1].split('#')[0]
if not project_id or not project_id.isnumeric():
return None
from ..user.models import Project
project = Project.query.filter_by(id=int(project_id)).first()
if not project:
return None
pd = project.data
# project.url returns a relative path
pd['link'] = url
return pystache.render(TEMPLATE_PROJECT, pd)
| Fix project links broken by anchor | Fix project links broken by anchor
| Python | mit | loleg/dribdat,loleg/dribdat,loleg/dribdat,loleg/dribdat | ---
+++
@@ -26,8 +26,8 @@
def box_project(url):
"""Create a OneBox for local projects."""
- project_id = url.split('/')[-1]
- if not project_id:
+ project_id = url.split('/')[-1].split('#')[0]
+ if not project_id or not project_id.isnumeric():
return None
from ..user.models import Project
project = Project.query.filter_by(id=int(project_id)).first() |
a7058352df6cd8c0e411df5e1b0948729f8ffe60 | dezede/__init__.py | dezede/__init__.py | # coding: utf-8
from __future__ import unicode_literals
__version__ = 1, 8, 3
get_version = lambda: '.'.join(str(i) for i in __version__)
__verbose_name__ = 'Dezède'
| # coding: utf-8
from __future__ import unicode_literals
__version__ = 2, 0, 0, 'pre'
get_version = lambda: '.'.join(str(i) for i in __version__)
__verbose_name__ = 'Dezède'
| Change le numéro de version pour 2.0.0.pre | Change le numéro de version pour 2.0.0.pre
| Python | bsd-3-clause | dezede/dezede,dezede/dezede,dezede/dezede,dezede/dezede | ---
+++
@@ -3,6 +3,6 @@
from __future__ import unicode_literals
-__version__ = 1, 8, 3
+__version__ = 2, 0, 0, 'pre'
get_version = lambda: '.'.join(str(i) for i in __version__)
__verbose_name__ = 'Dezède' |
a6b2aeb6a6f28e6b5a00719a34454894f933288e | lamana/__init__.py | lamana/__init__.py | # -----------------------------------------------------------------------------
__title__ = 'lamana'
__version__ = '0.4.10'
__author__ = 'P. Robinson II'
__license__ = 'BSD3'
__copyright__ = 'Copyright 2015 P. Robinson II'
# DEPRECATE: Renaming in 0.4.5b1+
##import LamAna.input_
##import LamAna.distributions
##import LamAna.ratios
##import LamAna.predictions
##import LamAna.constructs
##import LamAna.theories
##import LamAna.output_
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
| # -----------------------------------------------------------------------------
__title__ = 'lamana'
__version__ = '0.4.11'
__author__ = 'P. Robinson II'
__license__ = 'BSD3'
__copyright__ = 'Copyright 2015 P. Robinson II'
# DEPRECATE: Renaming in 0.4.5b1+
##import LamAna.input_
##import LamAna.distributions
##import LamAna.ratios
##import LamAna.predictions
##import LamAna.constructs
##import LamAna.theories
##import LamAna.output_
import lamana.input_
import lamana.distributions
import lamana.constructs
import lamana.theories
import lamana.output_
#from lamana.models import *
#import lamana.ratios
#import lamana.predictions
#import lamana.gamuts
| Test hotfix with advanced develop and cloned repo | Test hotfix with advanced develop and cloned repo
| Python | bsd-3-clause | par2/lamana-test | ---
+++
@@ -1,7 +1,7 @@
# -----------------------------------------------------------------------------
__title__ = 'lamana'
-__version__ = '0.4.10'
+__version__ = '0.4.11'
__author__ = 'P. Robinson II'
__license__ = 'BSD3'
__copyright__ = 'Copyright 2015 P. Robinson II' |
c93a7d6536b2a88ecf8c831714531837f4febd1b | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/urls.py | {{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/urls.py | from django.conf.urls import * # NOQA
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
import django.views.static
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = [
url(r'^media/(?P<path>.*)$', django.views.static.serve,
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
] + staticfiles_urlpatterns() + urlpatterns
| from django.conf.urls import * # NOQA
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
import django.views.static
admin.autodiscover()
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
]
# This is only needed when using runserver.
if settings.DEBUG:
import debug_toolbar
urlpatterns = [
url(r'^media/(?P<path>.*)$', django.views.static.serve,
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
url(r'^__debug__/', include(debug_toolbar.urls)),
] + staticfiles_urlpatterns() + urlpatterns
| Fix Django Debug Toolbar installation for new DjDT version | Fix Django Debug Toolbar installation for new DjDT version
| Python | mit | r0x73/django-template,r0x73/django-template,r0x73/django-template | ---
+++
@@ -12,7 +12,9 @@
# This is only needed when using runserver.
if settings.DEBUG:
+ import debug_toolbar
urlpatterns = [
url(r'^media/(?P<path>.*)$', django.views.static.serve,
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
+ url(r'^__debug__/', include(debug_toolbar.urls)),
] + staticfiles_urlpatterns() + urlpatterns |
8f04b56a842fa1a84e704af3c5b724c14006315e | server/models/user.py | server/models/user.py | from app_factory import db
from models.session import Session
class User(db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
name = db.Column('name', db.String(50))
username = db.Column('username', db.String(50))
password = db.Column('password', db.String(50))
email = db.Column('email', db.String(128))
session = db.relationship(
Session, uselist=False, backref=db.backref('user', order_by=id)
)
def __repr__(self):
return ''
'<User(name={name}, username={username}, '
'password={password}, email={email})>'.format(
name=self.name, username=self.username,
password=self.password, email=self.email
)
def __init__(self, name, username, password, email):
self.name = name
self.username = username
self.password = password
self.email = email
| from app_factory import db
from models.session import Session
class User(db.Model):
    """A registered user account with an optional one-to-one login session.

    Implements the accessor methods expected by Flask-Login
    (is_authenticated / is_active / is_anonymous / get_id).
    """
    __tablename__ = 'users'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column('name', db.String(50))
    username = db.Column('username', db.String(50))
    password = db.Column('password', db.String(50))
    email = db.Column('email', db.String(128))
    # One-to-one link to the user's current session (None when logged out).
    session = db.relationship(
        Session, uselist=False, backref=db.backref('user', order_by=id)
    )

    def __init__(self, name, username, password, email):
        self.name = name
        self.username = username
        self.password = password
        self.email = email

    def __repr__(self):
        # Fix: the old body was `return ''` followed by the formatted
        # string as an unreachable expression statement, so __repr__
        # always returned an empty string.
        return (
            '<User(name={name}, username={username}, '
            'password={password}, email={email})>'.format(
                name=self.name, username=self.username,
                password=self.password, email=self.email
            )
        )

    def is_authenticated(self):
        # Fix: hasattr() takes (object, name); the previous one-argument
        # call raised TypeError. A user is authenticated when a session
        # row with a non-null session_id is attached.
        return (self.session is not None and
                self.session.session_id is not None)

    def is_active(self):
        return True

    def is_anonymous(self):
        return False

    def get_id(self):
        return self.id
| Implement properties and methods for the User model class to enable the Flask-Login module | Implement properties and methods for the User model class to enable the Flask-Login module
| Python | mit | ganemone/ontheside,ganemone/ontheside,ganemone/ontheside | ---
+++
@@ -14,6 +14,12 @@
Session, uselist=False, backref=db.backref('user', order_by=id)
)
+ def __init__(self, name, username, password, email):
+ self.name = name
+ self.username = username
+ self.password = password
+ self.email = email
+
def __repr__(self):
return ''
'<User(name={name}, username={username}, '
@@ -22,8 +28,15 @@
password=self.password, email=self.email
)
- def __init__(self, name, username, password, email):
- self.name = name
- self.username = username
- self.password = password
- self.email = email
+ def is_authenticated(self):
+ return (hasattr(self.session.session_id) and
+ self.session.session_id is not None)
+
+ def is_active(self):
+ return True
+
+ def is_anonymous(self):
+ return False
+
+ def get_id(self):
+ return self.id |
fe772c380c48eb14fc47bbd0a0c95888b9ea700a | jsonschema/__init__.py | jsonschema/__init__.py | """
An implementation of JSON Schema for Python
The main functionality is provided by the validator classes for each of the
supported JSON Schema versions.
Most commonly, `validate` is the quickest way to simply validate a given
instance under a schema, and will create a validator for you.
"""
from jsonschema.exceptions import (
ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
)
from jsonschema._format import (
FormatChecker,
draft3_format_checker,
draft4_format_checker,
draft6_format_checker,
draft7_format_checker,
)
from jsonschema._types import TypeChecker
from jsonschema.validators import (
Draft3Validator,
Draft4Validator,
Draft6Validator,
Draft7Validator,
RefResolver,
validate,
)
import importlib_metadata
__version__ = importlib_metadata.version("jsonschema")
| """
An implementation of JSON Schema for Python
The main functionality is provided by the validator classes for each of the
supported JSON Schema versions.
Most commonly, `validate` is the quickest way to simply validate a given
instance under a schema, and will create a validator for you.
"""
from jsonschema.exceptions import (
ErrorTree, FormatError, RefResolutionError, SchemaError, ValidationError
)
from jsonschema._format import (
FormatChecker,
draft3_format_checker,
draft4_format_checker,
draft6_format_checker,
draft7_format_checker,
)
from jsonschema._types import TypeChecker
from jsonschema.validators import (
Draft3Validator,
Draft4Validator,
Draft6Validator,
Draft7Validator,
RefResolver,
validate,
)
try:
    from importlib import metadata
except ImportError:  # for Python<3.8
    # Fix: the fallback must bind the same name ("metadata") that is used
    # below; the old alias "metdata" left Python<3.8 raising NameError.
    import importlib_metadata as metadata
__version__ = metadata.version("jsonschema")
| Use importlib from std (if possible) | Use importlib from std (if possible) | Python | mit | Julian/jsonschema,Julian/jsonschema,python-jsonschema/jsonschema | ---
+++
@@ -27,6 +27,8 @@
RefResolver,
validate,
)
-
-import importlib_metadata
-__version__ = importlib_metadata.version("jsonschema")
+try:
+ from importlib import metadata
+except ImportError: # for Python<3.8
+ import importlib_metadata as metdata
+__version__ = metadata.version("jsonschema") |
dfc885784a869dc3f3ef200557be4303aa2752e9 | ldap_sync/callbacks.py | ldap_sync/callbacks.py | def removed_user_deactivate(user):
if user.is_active:
user.is_active = False
user.save()
def removed_user_delete(user):
user.delete()
def user_active_directory_deactivate(user, attributes, created, updated):
    """
    Deactivate user accounts based on Active Directory's
    userAccountControl flags. Requires 'userAccountControl'
    to be included in LDAP_SYNC_USER_EXTRA_ATTRIBUTES.

    Bit 0x2 (ACCOUNTDISABLE) marks a disabled account. A missing, empty
    or non-integer attribute value is ignored so a single malformed LDAP
    entry never aborts the whole sync run.
    """
    try:
        user_account_control = int(attributes['userAccountControl'][0])
    except (KeyError, IndexError, ValueError):
        # Attribute absent, empty list, or not parseable as an integer:
        # leave the user untouched (previously only KeyError was caught,
        # so an empty or malformed value crashed the sync).
        return
    if user_account_control & 2:
        user.is_active = False
def removed_user_deactivate(user):
    """
    Mark an account inactive once it no longer appears in the source
    LDAP server. Accounts that are already inactive are left untouched
    (no redundant save).
    """
    if not user.is_active:
        return
    user.is_active = False
    user.save()
def removed_user_delete(user):
    """
    Permanently remove an account that no longer appears in the source
    LDAP server.
    """
    user.delete()
| Add callback to disable users by AD userAccountControl flags | Add callback to disable users by AD userAccountControl flags
| Python | bsd-3-clause | jbittel/django-ldap-sync,alexsilva/django-ldap-sync,alexsilva/django-ldap-sync | ---
+++
@@ -1,8 +1,30 @@
+def user_active_directory_deactivate(user, attributes, created, updated):
+ """
+ Deactivate user accounts based on Active Directory's
+ userAccountControl flags. Requires 'userAccountControl'
+ to be included in LDAP_SYNC_USER_EXTRA_ATTRIBUTES.
+ """
+ try:
+ user_account_control = int(attributes['userAccountControl'][0])
+ if user_account_control & 2:
+ user.is_active = False
+ except KeyError:
+ pass
+
+
def removed_user_deactivate(user):
+ """
+ Deactivate user accounts that no longer appear in the
+ source LDAP server.
+ """
if user.is_active:
user.is_active = False
user.save()
def removed_user_delete(user):
+ """
+ Delete user accounts that no longer appear in the
+ source LDAP server.
+ """
user.delete() |
1f6cb706d16153814ff5821998f0fb9216357573 | src/main.py | src/main.py | #!/usr/bin/env python
if __name__ == "__main__":
run()
| #!/usr/bin/env python
"""Simple Genetic Algorithm for solving TSP.
Usage:
main.py FILE --pop-size=POP_SIZE --max-gen=MAX_GENERATIONS --xover-rate=CROSSOVER_RATE --mute-rate=MUTATION_RATE --num-elites=NUM_ELITES --tournament-k=K
main.py (-h | --help)
main.py (-v | --version)
Options:
-h --help Show this screen.
-v --version Show version.
"""
from docopt import docopt
class Genotype(object):
    """A generation's chromosome population together with its elite member."""

    # TODO: new_pop?
    def __init__(self, population, elite):
        self.elite = elite
        self.population = population
if __name__ == '__main__':
arguments = docopt(__doc__, version='0.1')
print(arguments)
# TODO
# read file
# initialize pop
# run ga (break down)
# Default crossover is PMX. Can also use CX.
| Build interface w/ docopt and add POD | Build interface w/ docopt and add POD
| Python | unlicense | dideler/intro-to-genetic-algorithms | ---
+++
@@ -1,4 +1,33 @@
#!/usr/bin/env python
-if __name__ == "__main__":
- run()
+"""Simple Genetic Algorithm for solving TSP.
+
+Usage:
+ main.py FILE --pop-size=POP_SIZE --max-gen=MAX_GENERATIONS --xover-rate=CROSSOVER_RATE --mute-rate=MUTATION_RATE --num-elites=NUM_ELITES --tournament-k=K
+ main.py (-h | --help)
+ main.py (-v | --version)
+
+Options:
+ -h --help Show this screen.
+ -v --version Show version.
+
+"""
+from docopt import docopt
+
+class Genotype(object):
+ """The population of chromosomes."""
+
+ def __init__(self, population, elite):
+ self.population = population
+ # TODO: new_pop?
+ self.elite = elite
+
+if __name__ == '__main__':
+ arguments = docopt(__doc__, version='0.1')
+ print(arguments)
+ # TODO
+ # read file
+ # initialize pop
+ # run ga (break down)
+
+# Default crossover is PMX. Can also use CX. |
caf554698fd55f2911742b2cdfb7f1579c0738a5 | examples/prompts/autocompletion-like-readline.py | examples/prompts/autocompletion-like-readline.py | #!/usr/bin/env python
"""
Autocompletion example that displays the autocompletions like readline does by
binding a custom handler to the Tab key.
"""
from __future__ import unicode_literals
from prompt_toolkit.shortcuts import prompt, CompleteStyle
from prompt_toolkit.contrib.completers import WordCompleter
animal_completer = WordCompleter([
'alligator', 'ant', 'ape', 'bat', 'bear', 'beaver', 'bee', 'bison',
'butterfly', 'cat', 'chicken', 'crocodile', 'dinosaur', 'dog', 'dolphine',
'dove', 'duck', 'eagle', 'elephant', 'fish', 'goat', 'gorilla', 'kangoroo',
'leopard', 'lion', 'mouse', 'rabbit', 'rat', 'snake', 'spider', 'turkey',
'turtle',
], ignore_case=True)
def main():
text = prompt('Give some animals: ', completer=animal_completer,
complete_style=CompleteStyle.READLINE_LIKE)
print('You said: %s' % text)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
"""
Autocompletion example that displays the autocompletions like readline does by
binding a custom handler to the Tab key.
"""
from __future__ import unicode_literals
from prompt_toolkit.shortcuts import prompt, CompleteStyle
from prompt_toolkit.contrib.completers import WordCompleter
animal_completer = WordCompleter([
'alligator', 'ant', 'ape', 'bat', 'bear', 'beaver', 'bee', 'bison',
'butterfly', 'cat', 'chicken', 'crocodile', 'dinosaur', 'dog', 'dolphin',
'dove', 'duck', 'eagle', 'elephant', 'fish', 'goat', 'gorilla', 'kangaroo',
'leopard', 'lion', 'mouse', 'rabbit', 'rat', 'snake', 'spider', 'turkey',
'turtle',
], ignore_case=True)
def main():
    """Prompt for animal names with readline-style completion and echo them."""
    text = prompt(
        'Give some animals: ',
        completer=animal_completer,
        complete_style=CompleteStyle.READLINE_LIKE,
    )
    print('You said: %s' % text)
if __name__ == '__main__':
main()
| Fix typos: `dolphine` -> `dolphin`, `kangoroo` -> `kangaroo` | Fix typos: `dolphine` -> `dolphin`, `kangoroo` -> `kangaroo`
| Python | bsd-3-clause | jonathanslenders/python-prompt-toolkit | ---
+++
@@ -11,8 +11,8 @@
animal_completer = WordCompleter([
'alligator', 'ant', 'ape', 'bat', 'bear', 'beaver', 'bee', 'bison',
- 'butterfly', 'cat', 'chicken', 'crocodile', 'dinosaur', 'dog', 'dolphine',
- 'dove', 'duck', 'eagle', 'elephant', 'fish', 'goat', 'gorilla', 'kangoroo',
+ 'butterfly', 'cat', 'chicken', 'crocodile', 'dinosaur', 'dog', 'dolphin',
+ 'dove', 'duck', 'eagle', 'elephant', 'fish', 'goat', 'gorilla', 'kangaroo',
'leopard', 'lion', 'mouse', 'rabbit', 'rat', 'snake', 'spider', 'turkey',
'turtle',
], ignore_case=True) |
f632c56310ce527d0773e64fed9fa4e7e0eae36a | test/on_yubikey/test_cli_misc.py | test/on_yubikey/test_cli_misc.py | import unittest
from .util import (DestructiveYubikeyTestCase, is_fips, ykman_cli)
class TestYkmanInfo(DestructiveYubikeyTestCase):
def test_ykman_info(self):
info = ykman_cli('info')
self.assertIn('Device type:', info)
self.assertIn('Serial number:', info)
self.assertIn('Firmware version:', info)
@unittest.skipIf(is_fips(), 'Not applicable to YubiKey FIPS.')
def test_ykman_info_does_not_report_fips_for_non_fips_device(self):
info = ykman_cli('info --check-fips')
self.assertNotIn('FIPS', info)
@unittest.skipIf(not is_fips(), 'YubiKey FIPS required.')
def test_ykman_info_reports_fips_status(self):
info = ykman_cli('info', '--check-fips')
self.assertIn('FIPS Approved Mode:', info)
self.assertIn(' FIDO U2F:', info)
self.assertIn(' OATH:', info)
self.assertIn(' OTP:', info)
| import unittest
from .util import (DestructiveYubikeyTestCase, is_fips, ykman_cli)
class TestYkmanInfo(DestructiveYubikeyTestCase):
    """Integration tests for the ``ykman info`` CLI command.

    These run against a physically connected YubiKey; the FIPS-specific
    cases are skipped or required depending on the attached device.
    """

    def test_ykman_info(self):
        # Basic device information must always be reported.
        info = ykman_cli('info')
        self.assertIn('Device type:', info)
        self.assertIn('Serial number:', info)
        self.assertIn('Firmware version:', info)

    @unittest.skipIf(is_fips(), 'Not applicable to YubiKey FIPS.')
    def test_ykman_info_does_not_report_fips_for_non_fips_device(self):
        # --check-fips must stay silent about FIPS on a non-FIPS key.
        info = ykman_cli('info', '--check-fips')
        self.assertNotIn('FIPS', info)

    @unittest.skipIf(not is_fips(), 'YubiKey FIPS required.')
    def test_ykman_info_reports_fips_status(self):
        # On a FIPS key, --check-fips reports per-application approval.
        info = ykman_cli('info', '--check-fips')
        self.assertIn('FIPS Approved Mode:', info)
        self.assertIn(' FIDO U2F:', info)
        self.assertIn(' OATH:', info)
        self.assertIn(' OTP:', info)
| Fix --check-fips invocation in test | Fix --check-fips invocation in test
| Python | bsd-2-clause | Yubico/yubikey-manager,Yubico/yubikey-manager | ---
+++
@@ -13,7 +13,7 @@
@unittest.skipIf(is_fips(), 'Not applicable to YubiKey FIPS.')
def test_ykman_info_does_not_report_fips_for_non_fips_device(self):
- info = ykman_cli('info --check-fips')
+ info = ykman_cli('info', '--check-fips')
self.assertNotIn('FIPS', info)
@unittest.skipIf(not is_fips(), 'YubiKey FIPS required.') |
47b346404f29c89ddc7e85cd3833564593823449 | zou/app/utils/query.py | zou/app/utils/query.py | def get_query_criterions_from_request(request):
"""
Turn request parameters into a dict where keys are attributes to filter and
values are values to filter.
"""
criterions = {}
for key, value in request.args.items():
if key not in ["page"]:
criterions[key] = value
return criterions
def get_page_from_request(request):
"""
Return page parameter value (given through request query or post body).
Default value is 1.
"""
return request.args.get("page", 1)
def apply_criterions_to_db_query(model, db_query, criterions):
"""
Apply criterions given in HTTP request to the sqlachemy db query object.
"""
if "name" in criterions and hasattr(model, "name"):
value = criterions["name"]
db_query = db_query.filter(model.name.ilike(value))
del criterions["name"]
return db_query.filter_by(**criterions)
| import math
from zou.app import app
from zou.app.utils import fields
def get_query_criterions_from_request(request):
    """
    Build a filtering dict from the request's query parameters, mapping
    each attribute name to the value to filter on. Pagination keys are
    excluded.
    """
    return {
        key: value
        for key, value in request.args.items()
        if key not in ["page"]
    }
def get_page_from_request(request):
    """
    Return the page parameter value (given through request query or post
    body) as an int. Defaults to 1 when absent or not a valid integer.

    Query-string values arrive as strings; callers compare and do
    arithmetic on the page number (``page < 1``, ``(page - 1) * limit``),
    so the value is coerced here instead of at every call site.
    """
    try:
        return int(request.args.get("page", 1))
    except (TypeError, ValueError):
        return 1
def apply_criterions_to_db_query(model, db_query, criterions):
    """
    Apply criterions given in an HTTP request to the SQLAlchemy query
    object. A "name" criterion becomes a case-insensitive match when the
    model has a name column, and is consumed from *criterions* in the
    process; every remaining criterion is applied as an exact filter.
    """
    if hasattr(model, "name") and "name" in criterions:
        db_query = db_query.filter(model.name.ilike(criterions.pop("name")))
    return db_query.filter_by(**criterions)
def get_paginated_results(query, page):
    """
    Apply pagination to the query object.

    When ``page`` < 1 pagination is disabled and every matching entry is
    returned as a serialized list. Otherwise a dict with ``data``,
    ``total``, ``nb_pages``, ``limit``, ``offset`` and ``page`` keys is
    returned. NOTE(review): the two branches return different types
    (list vs dict) -- callers must handle both.
    """
    if page < 1:
        # Pagination disabled: serialize the full result set.
        entries = query.all()
        return fields.serialize_list(entries)
    else:
        # Page size comes from application configuration.
        limit = app.config['NB_RECORDS_PER_PAGE']
        total = query.count()
        offset = (page - 1) * limit
        nb_pages = int(math.ceil(total / float(limit)))
        query = query.limit(limit)
        query = query.offset(offset)
        if (total < offset):
            # Requested page lies past the end of the result set.
            # NOTE(review): "total" is reported as 0 here rather than the
            # real count -- looks intentional, but worth confirming.
            result = {
                "data": [],
                "total": 0,
                "nb_pages": nb_pages,
                "limit": limit,
                "offset": offset,
                "page": page
            }
        else:
            result = {
                "data": fields.serialize_list(query.all()),
                "total": total,
                "nb_pages": nb_pages,
                "limit": limit,
                "offset": offset,
                "page": page
            }
        return result
| Add helper to paginate results | Add helper to paginate results
| Python | agpl-3.0 | cgwire/zou | ---
+++
@@ -1,3 +1,9 @@
+import math
+
+from zou.app import app
+from zou.app.utils import fields
+
+
def get_query_criterions_from_request(request):
"""
Turn request parameters into a dict where keys are attributes to filter and
@@ -28,3 +34,40 @@
del criterions["name"]
return db_query.filter_by(**criterions)
+
+
+def get_paginated_results(query, page):
+ """
+ Apply pagination to the query object.
+ """
+ if page < 1:
+ entries = query.all()
+ return fields.serialize_list(entries)
+ else:
+ limit = app.config['NB_RECORDS_PER_PAGE']
+ total = query.count()
+ offset = (page - 1) * limit
+
+ nb_pages = int(math.ceil(total / float(limit)))
+ query = query.limit(limit)
+ query = query.offset(offset)
+
+ if (total < offset):
+ result = {
+ "data": [],
+ "total": 0,
+ "nb_pages": nb_pages,
+ "limit": limit,
+ "offset": offset,
+ "page": page
+ }
+ else:
+ result = {
+ "data": fields.serialize_list(query.all()),
+ "total": total,
+ "nb_pages": nb_pages,
+ "limit": limit,
+ "offset": offset,
+ "page": page
+ }
+ return result |
c688a587d49d6186462737ab00c429a30c0a4d4c | src/puzzle/problems/crossword/crossword_problem.py | src/puzzle/problems/crossword/crossword_problem.py | import collections
import re
from data import crossword, warehouse
from puzzle.problems.crossword import _base_crossword_problem
_CROSSWORD_REGEX = re.compile(r'^.*\(([\d\s,|]+)\)$')
_INTS = re.compile(r'(\d+)')
class CrosswordProblem(_base_crossword_problem._BaseCrosswordProblem):
@staticmethod
def score(lines):
return _base_crossword_problem.score(lines)
def _solve(self):
clue = ''.join(self.lines)
clue_keywords = crossword.clue_keywords(clue)
cursor = warehouse.get('/phrases/crossword/cursor')
results = crossword.query(cursor, clue)
if not results:
return {}
max_frequency = max([f for _, f, _ in results])
ranked = []
for (solution, frequency, keywords) in results:
score = 0.0
for keyword in clue_keywords:
# Increase score by how often the keyword appeared in other clues.
score += keywords[keyword] / frequency
# Normalize score based on how many keywords were considered.
score /= len(clue_keywords)
rank = score * frequency / max_frequency
if rank:
ranked.append((solution, rank))
return collections.OrderedDict(
sorted(ranked, key=lambda x: x[1], reverse=True))
| import collections
from data import crossword, warehouse
from puzzle.problems.crossword import _base_crossword_problem
class CrosswordProblem(_base_crossword_problem._BaseCrosswordProblem):
    """Solves crossword-style clues by ranking matches from a clue corpus."""

    @staticmethod
    def score(lines):
        # Delegate likelihood scoring to the shared base implementation.
        return _base_crossword_problem.score(lines)

    def _solve(self):
        """Return an OrderedDict of candidate solutions, best-ranked first."""
        clue = ''.join(self.lines)
        clue_keywords = crossword.clue_keywords(clue)
        cursor = warehouse.get('/phrases/crossword/cursor')
        # Each result unpacks as a (solution, frequency, keywords) triple;
        # "keywords" is presumably a per-solution keyword->count mapping --
        # confirm against crossword.query.
        results = crossword.query(cursor, clue)
        if not results:
            return {}
        max_frequency = max([f for _, f, _ in results])
        ranked = []
        for (solution, frequency, keywords) in results:
            score = 0.0
            for keyword in clue_keywords:
                # Increase score by how often the keyword appeared in other clues.
                score += keywords[keyword] / frequency
            # Normalize score based on how many keywords were considered.
            score /= len(clue_keywords)
            rank = score * frequency / max_frequency
            # Candidates with a zero rank are dropped entirely.
            if rank:
                ranked.append((solution, rank))
        return collections.OrderedDict(
            sorted(ranked, key=lambda x: x[1], reverse=True))
| Delete unused module local constants. | Delete unused module local constants.
| Python | mit | PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge | ---
+++
@@ -1,11 +1,7 @@
import collections
-import re
from data import crossword, warehouse
from puzzle.problems.crossword import _base_crossword_problem
-
-_CROSSWORD_REGEX = re.compile(r'^.*\(([\d\s,|]+)\)$')
-_INTS = re.compile(r'(\d+)')
class CrosswordProblem(_base_crossword_problem._BaseCrosswordProblem): |
d478082c93125212f07a7b73e2d9d04d1b2c1058 | libthumbor/__init__.py | libthumbor/__init__.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# libthumbor - python extension to thumbor
# http://github.com/heynemann/libthumbor
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 Bernardo Heynemann heynemann@gmail.com
'''libthumbor is the library used to access thumbor's images in python'''
from pkg_resources import get_distribution
__version__ = get_distribution('libthumbor').version
from libthumbor.crypto import CryptoURL
| #!/usr/bin/python
# -*- coding: utf-8 -*-
# libthumbor - python extension to thumbor
# http://github.com/heynemann/libthumbor
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 Bernardo Heynemann heynemann@gmail.com
'''libthumbor is the library used to access thumbor's images in python'''
from pkg_resources import get_distribution, DistributionNotFound
__project__ = 'libthumbor'
try:
__version__ = get_distribution(__project__).version
except DistributionNotFound:
# Returns a local version. For tests.
__version__ = '{}-local'.format(__project__)
from libthumbor.crypto import CryptoURL
| Fix verson number in tests | Fix verson number in tests
| Python | mit | APSL/libthumbor,thumbor/libthumbor,DomainGroupOSS/libthumbor | ---
+++
@@ -10,8 +10,14 @@
'''libthumbor is the library used to access thumbor's images in python'''
-from pkg_resources import get_distribution
+from pkg_resources import get_distribution, DistributionNotFound
-__version__ = get_distribution('libthumbor').version
+__project__ = 'libthumbor'
+try:
+ __version__ = get_distribution(__project__).version
+except DistributionNotFound:
+ # Returns a local version. For tests.
+ __version__ = '{}-local'.format(__project__)
+
from libthumbor.crypto import CryptoURL |
4ed0e5f1c6505a80bce50e27f1969693a9a4cdad | alignak_backend_client/__init__.py | alignak_backend_client/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend client library
This module is a Python library used the REST API of the Alignak backend
"""
# Application version and manifest
VERSION = (0, 5, 1)
__application__ = u"Alignak Backend client"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015-2016 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend client library"
__releasenotes__ = u"""Alignak backend client library"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-client"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Alignak REST backend client library
This module is a Python library used the REST API of the Alignak backend
"""
# Application version and manifest
VERSION = (0, 5, 2)
__application__ = u"Alignak Backend client"
__short_version__ = '.'.join((str(each) for each in VERSION[:2]))
__version__ = '.'.join((str(each) for each in VERSION[:4]))
__author__ = u"Alignak team"
__copyright__ = u"(c) 2015-2016 - %s" % __author__
__license__ = u"GNU Affero General Public License, version 3"
__description__ = u"Alignak backend client library"
__releasenotes__ = u"""Alignak backend client library"""
__doc_url__ = "https://github.com/Alignak-monitoring-contrib/alignak-backend-client"
# Application manifest
manifest = {
'name': __application__,
'version': __version__,
'author': __author__,
'description': __description__,
'copyright': __copyright__,
'license': __license__,
'release': __releasenotes__,
'doc': __doc_url__
}
| Set version to 0.5.2 to publish on Pypi | Set version to 0.5.2 to publish on Pypi
| Python | agpl-3.0 | Alignak-monitoring-contrib/alignak-backend-client,Alignak-monitoring-contrib/alignakbackend-api-client,Alignak-monitoring-contrib/alignakbackend-api-client,Alignak-monitoring-contrib/alignak-backend-client | ---
+++
@@ -8,7 +8,7 @@
This module is a Python library used the REST API of the Alignak backend
"""
# Application version and manifest
-VERSION = (0, 5, 1)
+VERSION = (0, 5, 2)
__application__ = u"Alignak Backend client"
__short_version__ = '.'.join((str(each) for each in VERSION[:2])) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.