commit stringlengths 40 40 | old_file stringlengths 4 150 | new_file stringlengths 4 150 | old_contents stringlengths 0 3.26k | new_contents stringlengths 1 4.43k | subject stringlengths 15 501 | message stringlengths 15 4.06k | lang stringclasses 4 values | license stringclasses 13 values | repos stringlengths 5 91.5k | diff stringlengths 0 4.35k |
|---|---|---|---|---|---|---|---|---|---|---|
d1a96f13204ad7028432096d25718e611d4d3d9d | depot/gpg.py | depot/gpg.py | import getpass
import os
import gnupg
class GPG(object):
def __init__(self, keyid):
self.gpg = gnupg.GPG(use_agent=False)
self.keyid = keyid
if not self.keyid:
# Compat with how Freight does it.
self.keyid = os.environ.get('GPG')
self.passphrase = None
self._verify()
def _verify(self):
"""Some sanity checks on GPG."""
if not self.keyid:
raise ValueError('No GPG key specified for signing, did you mean to use --no-sign?')
sign = self.gpg.sign('', keyid=self.keyid)
if 'secret key not available' in sign.stderr:
raise ValueError('Key not found')
elif 'NEED_PASSPHRASE' in sign.stderr:
self.passphrase = getpass.getpass('Passphrase for GPG key: ')
def sign(self, data, detach=False):
sign = self.gpg.sign(data, keyid=self.keyid, passphrase=self.passphrase, detach=detach)
if not sign:
raise ValueError(sign.stderr)
return str(sign)
def public_key(self):
return self.gpg.export_keys(self.keyid)
| import getpass
import os
import gnupg
class GPG(object):
def __init__(self, keyid, key=None, home=None):
self.gpg = gnupg.GPG(use_agent=False, gnupghome=home)
if key:
if not home:
raise ValueError('Cowardly refusing to import key in to default key store')
results = self.gpg.import_keys(key)
keyid = results.fingerprints[0]
self.keyid = keyid
if not self.keyid:
# Compat with how Freight does it.
self.keyid = os.environ.get('GPG')
self.passphrase = None
self._verify()
def _verify(self):
"""Some sanity checks on GPG."""
if not self.keyid:
raise ValueError('No GPG key specified for signing, did you mean to use --no-sign?')
sign = self.gpg.sign('', keyid=self.keyid)
if 'secret key not available' in sign.stderr:
raise ValueError('Key not found')
elif 'NEED_PASSPHRASE' in sign.stderr:
self.passphrase = getpass.getpass('Passphrase for GPG key: ')
def sign(self, data, detach=False):
sign = self.gpg.sign(data, keyid=self.keyid, passphrase=self.passphrase, detach=detach)
if not sign:
raise ValueError(sign.stderr)
return str(sign)
def public_key(self):
return self.gpg.export_keys(self.keyid)
| Allow passing in a raw key and homedir. | Allow passing in a raw key and homedir. | Python | apache-2.0 | coderanger/depot | ---
+++
@@ -5,8 +5,13 @@
class GPG(object):
- def __init__(self, keyid):
- self.gpg = gnupg.GPG(use_agent=False)
+ def __init__(self, keyid, key=None, home=None):
+ self.gpg = gnupg.GPG(use_agent=False, gnupghome=home)
+ if key:
+ if not home:
+ raise ValueError('Cowardly refusing to import key in to default key store')
+ results = self.gpg.import_keys(key)
+ keyid = results.fingerprints[0]
self.keyid = keyid
if not self.keyid:
# Compat with how Freight does it. |
11f43c583fb3b7e8ed2aa74f0f58445a6c2fbecf | bot/api/api.py | bot/api/api.py | from bot.api.domain import Message
from bot.api.telegram import TelegramBotApi
from bot.storage import State
class Api:
def __init__(self, telegram_api: TelegramBotApi, state: State):
self.telegram_api = telegram_api
self.state = state
def send_message(self, message: Message, **params):
message_params = message.data.copy()
message_params.update(params)
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
return self.get_updates(timeout=0)
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout)
for update in updates:
self.__set_updates_offset(update.update_id)
yield update
def __get_updates_offset(self):
return self.state.next_update_id
def __set_updates_offset(self, last_update_id):
self.state.next_update_id = str(last_update_id + 1)
def __getattr__(self, item):
return self.telegram_api.__getattr__(item)
| from bot.api.domain import Message
from bot.api.telegram import TelegramBotApi
from bot.storage import State
class Api:
def __init__(self, telegram_api: TelegramBotApi, state: State):
self.telegram_api = telegram_api
self.state = state
def send_message(self, message: Message, **params):
message_params = message.data.copy()
message_params.update(params)
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
there_are_pending_updates = True
while there_are_pending_updates:
there_are_pending_updates = False
for update in self.get_updates(timeout=0):
there_are_pending_updates = True
yield update
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout)
for update in updates:
self.__set_updates_offset(update.update_id)
yield update
def __get_updates_offset(self):
return self.state.next_update_id
def __set_updates_offset(self, last_update_id):
self.state.next_update_id = str(last_update_id + 1)
def __getattr__(self, item):
return self.telegram_api.__getattr__(item)
| Fix get_pending_updates not correctly returning all pending updates | Fix get_pending_updates not correctly returning all pending updates
It was only returning the first 100 ones returned in the first telegram API call.
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot | ---
+++
@@ -14,7 +14,12 @@
return self.telegram_api.sendMessage(**message_params)
def get_pending_updates(self):
- return self.get_updates(timeout=0)
+ there_are_pending_updates = True
+ while there_are_pending_updates:
+ there_are_pending_updates = False
+ for update in self.get_updates(timeout=0):
+ there_are_pending_updates = True
+ yield update
def get_updates(self, timeout=45):
updates = self.telegram_api.getUpdates(offset=self.__get_updates_offset(), timeout=timeout) |
48b8efabd11a44dfabcd91f6744858535ddfb498 | djangosaml2/templatetags/idplist.py | djangosaml2/templatetags/idplist.py | # Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
self.conf = config_settings_loader()
def render(self, context):
context[self.variable_name] = self.conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
| # Copyright (C) 2011 Yaco Sistemas (http://www.yaco.es)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django import template
from djangosaml2.conf import config_settings_loader
register = template.Library()
class IdPListNode(template.Node):
def __init__(self, variable_name):
self.variable_name = variable_name
def render(self, context):
conf = config_settings_loader()
context[self.variable_name] = conf.get_available_idps()
return ''
@register.tag
def idplist(parser, token):
try:
tag_name, as_part, variable = token.split_contents()
except ValueError:
raise template.TemplateSyntaxError(
'%r tag requires two arguments' % token.contents.split()[0])
if not as_part == 'as':
raise template.TemplateSyntaxError(
'%r tag first argument must be the literal "as"' % tag_name)
return IdPListNode(variable)
| Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also this code is more reentrant | Load the config as late as possible to avoid crashing when the configuration is not ready yet. Also this code is more reentrant
| Python | apache-2.0 | GradConnection/djangosaml2,advisory/djangosaml2_tenant,WiserTogether/djangosaml2,kviktor/djangosaml2-py3,advisory/djangosaml2_tenant,Gagnavarslan/djangosaml2,shabda/djangosaml2,GradConnection/djangosaml2,WiserTogether/djangosaml2,shabda/djangosaml2,kviktor/djangosaml2-py3,City-of-Helsinki/djangosaml2,City-of-Helsinki/djangosaml2 | ---
+++
@@ -23,10 +23,10 @@
def __init__(self, variable_name):
self.variable_name = variable_name
- self.conf = config_settings_loader()
def render(self, context):
- context[self.variable_name] = self.conf.get_available_idps()
+ conf = config_settings_loader()
+ context[self.variable_name] = conf.get_available_idps()
return ''
|
d636d8e74514bbda29170b18ef3de90dfbd96397 | pylisp/application/lispd/address_tree/ddt_container_node.py | pylisp/application/lispd/address_tree/ddt_container_node.py | '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
pass
| '''
Created on 1 jun. 2013
@author: sander
'''
from .container_node import ContainerNode
class DDTContainerNode(ContainerNode):
'''
A ContainerNode that indicates that we are responsible for this part of
the DDT tree.
'''
| Add a bit of docs | Add a bit of docs
| Python | bsd-3-clause | steffann/pylisp | ---
+++
@@ -7,4 +7,7 @@
class DDTContainerNode(ContainerNode):
- pass
+ '''
+ A ContainerNode that indicates that we are responsible for this part of
+ the DDT tree.
+ ''' |
b6e9215457eba813f91c9eb4a8b96f8652bcd5fc | functional_tests/pages/settings.py | functional_tests/pages/settings.py | # -*- coding: utf-8 -*-
from selenium.webdriver.support.ui import Select
from page_objects import PageObject, PageElement, MultiPageElement
class SettingsPage(PageObject):
return_link = PageElement(css='.mui--text-title a.appbar-correct')
inlist_delete_confirm = PageElement(name='inlist_delete_confirm')
action_delete_confirm = PageElement(name='action_delete_confirm')
confirm = PageElement(name='confirm')
content = PageElement(id_='content')
sidebar = PageElement(id_='sidebar')
sidebar_return_link = PageElement(css='#sidebar a#return')
_settings_list = MultiPageElement(tag_name='label')
@property
def settings_list(self):
return [setting.text for setting in self._settings_list]
_language_elem = PageElement(name='language')
@property
def language(self):
return Select(self._language_elem)
| # -*- coding: utf-8 -*-
from selenium.webdriver.support.ui import Select
from page_objects import PageObject, PageElement, MultiPageElement
class SettingsPage(PageObject):
return_link = PageElement(css='#sidebar-brand a')
inlist_delete_confirm = PageElement(name='inlist_delete_confirm')
action_delete_confirm = PageElement(name='action_delete_confirm')
confirm = PageElement(name='confirm')
content = PageElement(id_='content')
sidebar = PageElement(id_='sidebar')
sidebar_return_link = PageElement(css='#sidebar a#return')
_settings_list = MultiPageElement(tag_name='label')
@property
def settings_list(self):
return [setting.text for setting in self._settings_list]
_language_elem = PageElement(name='language')
@property
def language(self):
return Select(self._language_elem)
| Make the return link work again | Make the return link work again
| Python | mit | XeryusTC/projman,XeryusTC/projman,XeryusTC/projman | ---
+++
@@ -3,7 +3,7 @@
from page_objects import PageObject, PageElement, MultiPageElement
class SettingsPage(PageObject):
- return_link = PageElement(css='.mui--text-title a.appbar-correct')
+ return_link = PageElement(css='#sidebar-brand a')
inlist_delete_confirm = PageElement(name='inlist_delete_confirm')
action_delete_confirm = PageElement(name='action_delete_confirm')
confirm = PageElement(name='confirm') |
a38e293d76beaa71893a8f5c4be2ea562d6d3fc2 | apistar/layouts/standard/project/routes.py | apistar/layouts/standard/project/routes.py | from apistar import Route, Include
from apistar.docs import docs_routes
from apistar.statics import static_routes
from project.views import welcome
routes = [
Route('/', 'GET', welcome),
Include('/docs', docs_routes),
Include('/static', static_routes)
]
| from apistar import Include, Route
from apistar.docs import docs_routes
from apistar.statics import static_routes
from project.views import welcome
routes = [
Route('/', 'GET', welcome),
Include('/docs', docs_routes),
Include('/static', static_routes)
]
| Fix import ordering in standard layout | Fix import ordering in standard layout
| Python | bsd-3-clause | encode/apistar,rsalmaso/apistar,rsalmaso/apistar,encode/apistar,tomchristie/apistar,tomchristie/apistar,tomchristie/apistar,thimslugga/apistar,thimslugga/apistar,rsalmaso/apistar,encode/apistar,rsalmaso/apistar,encode/apistar,thimslugga/apistar,thimslugga/apistar,tomchristie/apistar | ---
+++
@@ -1,4 +1,4 @@
-from apistar import Route, Include
+from apistar import Include, Route
from apistar.docs import docs_routes
from apistar.statics import static_routes
from project.views import welcome |
bfad91e9685b94f7c1072126d44ef29c9235f973 | reports/management/commands/tests.py | reports/management/commands/tests.py | import pytz
try:
import mock
except ImportError:
from unittest import mock
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from datetime import datetime
from django.conf import settings
from django.core import management
from django.test import TestCase
from tempfile import NamedTemporaryFile
class ManagementCommandsTests(TestCase):
def mk_tempfile(self):
tmp_file = NamedTemporaryFile(suffix='.xlsx')
self.addCleanup(tmp_file.close)
return tmp_file
def midnight(self, timestamp):
return timestamp.replace(hour=0, minute=0, second=0, microsecond=0,
tzinfo=pytz.timezone(settings.TIME_ZONE))
def test_command_requires_output_file(self):
with self.assertRaises(management.CommandError) as ce:
management.call_command('generate_reports',
'--start', '2016-01-01', '--end', '2016-02-01',
'--email-to', 'foo@example.com',
'--email-subject', 'The Email Subject')
self.assertEqual(
str(ce.exception), "Please specify --output-file.")
@mock.patch("reports.tasks.generate_report.apply_async")
def test_command_successful(self, mock_generation):
tmp_file = self.mk_tempfile()
management.call_command('generate_reports',
'--start', '2016-01-01', '--end', '2016-02-01',
'--output-file', tmp_file.name,
'--email-to', 'foo@example.com',
'--email-subject', 'The Email Subject')
mock_generation.assert_called_once_with(kwargs={
'output_file': tmp_file.name,
'start_date': self.midnight(datetime.strptime('2016-01-01',
'%Y-%m-%d')),
'end_date': self.midnight(datetime.strptime('2016-02-01',
'%Y-%m-%d')),
'email_recipients': ['foo@example.com'],
'email_sender': settings.DEFAULT_FROM_EMAIL,
'email_subject': 'The Email Subject'})
| Test report generation management command calls task correctly | Test report generation management command calls task correctly
| Python | bsd-3-clause | praekelt/hellomama-registration,praekelt/hellomama-registration | ---
+++
@@ -0,0 +1,54 @@
+import pytz
+try:
+ import mock
+except ImportError:
+ from unittest import mock
+
+try:
+ from StringIO import StringIO
+except ImportError:
+ from io import StringIO
+
+from datetime import datetime
+from django.conf import settings
+from django.core import management
+from django.test import TestCase
+from tempfile import NamedTemporaryFile
+
+
+class ManagementCommandsTests(TestCase):
+ def mk_tempfile(self):
+ tmp_file = NamedTemporaryFile(suffix='.xlsx')
+ self.addCleanup(tmp_file.close)
+ return tmp_file
+
+ def midnight(self, timestamp):
+ return timestamp.replace(hour=0, minute=0, second=0, microsecond=0,
+ tzinfo=pytz.timezone(settings.TIME_ZONE))
+
+ def test_command_requires_output_file(self):
+ with self.assertRaises(management.CommandError) as ce:
+ management.call_command('generate_reports',
+ '--start', '2016-01-01', '--end', '2016-02-01',
+ '--email-to', 'foo@example.com',
+ '--email-subject', 'The Email Subject')
+ self.assertEqual(
+ str(ce.exception), "Please specify --output-file.")
+
+ @mock.patch("reports.tasks.generate_report.apply_async")
+ def test_command_successful(self, mock_generation):
+ tmp_file = self.mk_tempfile()
+ management.call_command('generate_reports',
+ '--start', '2016-01-01', '--end', '2016-02-01',
+ '--output-file', tmp_file.name,
+ '--email-to', 'foo@example.com',
+ '--email-subject', 'The Email Subject')
+ mock_generation.assert_called_once_with(kwargs={
+ 'output_file': tmp_file.name,
+ 'start_date': self.midnight(datetime.strptime('2016-01-01',
+ '%Y-%m-%d')),
+ 'end_date': self.midnight(datetime.strptime('2016-02-01',
+ '%Y-%m-%d')),
+ 'email_recipients': ['foo@example.com'],
+ 'email_sender': settings.DEFAULT_FROM_EMAIL,
+ 'email_subject': 'The Email Subject'}) | |
0e3da072c5ff62996ebed9d5d6909bbf9bb4b30f | jsonrpcserver/blueprint.py | jsonrpcserver/blueprint.py | """blueprint.py"""
import flask
from werkzeug.exceptions import HTTPException
from werkzeug.exceptions import default_exceptions
from jsonrpcserver import exceptions
from jsonrpcserver import logger
from jsonrpcserver import bp
def error(e, response_str):
"""Ensure we always respond with jsonrpc, such as on 400 or other bad
request"""
response = flask.Response(response_str, mimetype='application/json')
response.status_code = (e.code if isinstance(e, HTTPException) else 500)
logger.info('<-- {} {}'.format(response.status_code, response_str))
return response
def invalid_request(e):
"""Status codes 400-499"""
return error(e, str(exceptions.InvalidRequest()))
def internal_error(e):
"""Any error other than status codes 400-499"""
return error(e, str(exceptions.InternalError()))
# Override flask internal error handlers, to return as jsonrpc
for code in default_exceptions.keys():
if 400 <= code <= 499:
bp.app_errorhandler(code)(invalid_request)
else:
bp.app_errorhandler(code)(internal_error)
# Catch RPCHandler exceptions and return jsonrpc
@bp.app_errorhandler(exceptions.RPCHandlerException)
def handler_error(e):
"""RPCHandlerExceptions: responds with json"""
return error(e, str(e))
| """blueprint.py"""
import flask
from werkzeug.exceptions import HTTPException
from werkzeug.exceptions import default_exceptions
from jsonrpcserver import exceptions
from jsonrpcserver import logger
from jsonrpcserver import bp
def error(e, response_str):
"""Ensure we always respond with jsonrpc, such as on 400 or other bad
request"""
response = flask.Response(response_str, mimetype='application/json')
response.status_code = (e.code if isinstance(e, HTTPException) else 400)
logger.info('<-- {} {}'.format(response.status_code, response_str))
return response
def invalid_request(e):
"""Status codes 400-499"""
return error(e, str(exceptions.InvalidRequest()))
def internal_error(e):
"""Any error other than status codes 400-499"""
return error(e, str(exceptions.InternalError()))
# Override flask internal error handlers, to return as jsonrpc
for code in default_exceptions.keys():
if 400 <= code <= 499:
bp.app_errorhandler(code)(invalid_request)
else:
bp.app_errorhandler(code)(internal_error)
# Catch RPCHandler exceptions and return jsonrpc
@bp.app_errorhandler(exceptions.RPCHandlerException)
def handler_error(e):
"""RPCHandlerExceptions: responds with json"""
return error(e, str(e))
| Return http status code 400 (Bad Request) unless flask gives an error code | Return http status code 400 (Bad Request) unless flask gives an error code
| Python | mit | bcb/jsonrpcserver | ---
+++
@@ -8,24 +8,28 @@
from jsonrpcserver import logger
from jsonrpcserver import bp
+
def error(e, response_str):
"""Ensure we always respond with jsonrpc, such as on 400 or other bad
request"""
response = flask.Response(response_str, mimetype='application/json')
- response.status_code = (e.code if isinstance(e, HTTPException) else 500)
+ response.status_code = (e.code if isinstance(e, HTTPException) else 400)
logger.info('<-- {} {}'.format(response.status_code, response_str))
return response
+
def invalid_request(e):
"""Status codes 400-499"""
return error(e, str(exceptions.InvalidRequest()))
+
def internal_error(e):
"""Any error other than status codes 400-499"""
return error(e, str(exceptions.InternalError()))
+
# Override flask internal error handlers, to return as jsonrpc
for code in default_exceptions.keys():
@@ -34,6 +38,7 @@
else:
bp.app_errorhandler(code)(internal_error)
+
# Catch RPCHandler exceptions and return jsonrpc
@bp.app_errorhandler(exceptions.RPCHandlerException)
def handler_error(e): |
e9bd1c56025e380444ba1e92f6631f59dd01a10a | cms/djangoapps/contentstore/views/session_kv_store.py | cms/djangoapps/contentstore/views/session_kv_store.py | from xblock.runtime import KeyValueStore, InvalidScopeError
class SessionKeyValueStore(KeyValueStore):
def __init__(self, request, descriptor_model_data):
self._descriptor_model_data = descriptor_model_data
self._session = request.session
def get(self, key):
try:
return self._descriptor_model_data[key.field_name]
except (KeyError, InvalidScopeError):
return self._session[tuple(key)]
def set(self, key, value):
try:
self._descriptor_model_data[key.field_name] = value
except (KeyError, InvalidScopeError):
self._session[tuple(key)] = value
def delete(self, key):
try:
del self._descriptor_model_data[key.field_name]
except (KeyError, InvalidScopeError):
del self._session[tuple(key)]
def has(self, key):
return key in self._descriptor_model_data or key in self._session
| from xblock.runtime import KeyValueStore, InvalidScopeError
class SessionKeyValueStore(KeyValueStore):
    """Two-tier KeyValueStore backed by descriptor model data and the session.

    Every operation first targets the descriptor's model data, addressed by
    the key's ``field_name``; if that raises ``KeyError`` or
    ``InvalidScopeError``, the Django session is used instead, addressed by
    ``tuple(key)``.
    """

    def __init__(self, request, descriptor_model_data):
        self._descriptor_model_data = descriptor_model_data
        self._session = request.session

    def get(self, key):
        """Read a value, falling back to the session store."""
        try:
            return self._descriptor_model_data[key.field_name]
        except (InvalidScopeError, KeyError):
            return self._session[tuple(key)]

    def set(self, key, value):
        """Write a value, falling back to the session store."""
        try:
            self._descriptor_model_data[key.field_name] = value
        except (InvalidScopeError, KeyError):
            self._session[tuple(key)] = value

    def delete(self, key):
        """Remove a value, falling back to the session store."""
        try:
            del self._descriptor_model_data[key.field_name]
        except (InvalidScopeError, KeyError):
            del self._session[tuple(key)]

    def has(self, key):
        """True if either tier holds an entry for *key*."""
        found_in_model = key.field_name in self._descriptor_model_data
        return found_in_model or tuple(key) in self._session
| Fix SessionKeyValueStore.has to use the correct indexing value when looking up data | Fix SessionKeyValueStore.has to use the correct indexing value when looking up data
| Python | agpl-3.0 | ampax/edx-platform-backup,ak2703/edx-platform,ferabra/edx-platform,doganov/edx-platform,pdehaye/theming-edx-platform,jelugbo/tundex,jazztpt/edx-platform,shubhdev/edx-platform,J861449197/edx-platform,wwj718/ANALYSE,wwj718/ANALYSE,doismellburning/edx-platform,shabab12/edx-platform,jamiefolsom/edx-platform,abdoosh00/edraak,hkawasaki/kawasaki-aio8-1,Edraak/circleci-edx-platform,Ayub-Khan/edx-platform,4eek/edx-platform,doganov/edx-platform,analyseuc3m/ANALYSE-v1,ovnicraft/edx-platform,fintech-circle/edx-platform,kmoocdev2/edx-platform,xuxiao19910803/edx-platform,dkarakats/edx-platform,unicri/edx-platform,chudaol/edx-platform,torchingloom/edx-platform,jazkarta/edx-platform,tiagochiavericosta/edx-platform,adoosii/edx-platform,wwj718/ANALYSE,pomegranited/edx-platform,teltek/edx-platform,mjirayu/sit_academy,naresh21/synergetics-edx-platform,raccoongang/edx-platform,TeachAtTUM/edx-platform,nanolearningllc/edx-platform-cypress,morpheby/levelup-by,4eek/edx-platform,openfun/edx-platform,AkA84/edx-platform,defance/edx-platform,tiagochiavericosta/edx-platform,shubhdev/edx-platform,iivic/BoiseStateX,shubhdev/openedx,Ayub-Khan/edx-platform,nttks/edx-platform,ubc/edx-platform,kamalx/edx-platform,xingyepei/edx-platform,PepperPD/edx-pepper-platform,zubair-arbi/edx-platform,proversity-org/edx-platform,ahmedaljazzar/edx-platform,solashirai/edx-platform,Semi-global/edx-platform,chauhanhardik/populo_2,chand3040/cloud_that,shubhdev/edxOnBaadal,shubhdev/openedx,cecep-edu/edx-platform,zhenzhai/edx-platform,torchingloom/edx-platform,Softmotions/edx-platform,valtech-mooc/edx-platform,amir-qayyum-khan/edx-platform,AkA84/edx-platform,fintech-circle/edx-platform,Edraak/edraak-platform,jbzdak/edx-platform,peterm-itr/edx-platform,shubhdev/openedx,jamesblunt/edx-platform,y12uc231/edx-platform,defance/edx-platform,mbareta/edx-platform-ft,proversity-org/edx-platform,xingyepei/edx-platform,doismellburning/edx-platform,jruiperezv/ANALYSE,Edraak/edx-platform,CredoReference/edx-platfo
rm,TeachAtTUM/edx-platform,sameetb-cuelogic/edx-platform-test,prarthitm/edxplatform,inares/edx-platform,IONISx/edx-platform,philanthropy-u/edx-platform,y12uc231/edx-platform,EduPepperPDTesting/pepper2013-testing,y12uc231/edx-platform,jswope00/GAI,solashirai/edx-platform,halvertoluke/edx-platform,cselis86/edx-platform,prarthitm/edxplatform,deepsrijit1105/edx-platform,bigdatauniversity/edx-platform,kamalx/edx-platform,kamalx/edx-platform,angelapper/edx-platform,fly19890211/edx-platform,knehez/edx-platform,DNFcode/edx-platform,vikas1885/test1,RPI-OPENEDX/edx-platform,vismartltd/edx-platform,dkarakats/edx-platform,Livit/Livit.Learn.EdX,hkawasaki/kawasaki-aio8-1,mtlchun/edx,rationalAgent/edx-platform-custom,jruiperezv/ANALYSE,jazztpt/edx-platform,cyanna/edx-platform,shubhdev/edx-platform,jazkarta/edx-platform-for-isc,appsembler/edx-platform,yokose-ks/edx-platform,atsolakid/edx-platform,rationalAgent/edx-platform-custom,WatanabeYasumasa/edx-platform,eestay/edx-platform,edx-solutions/edx-platform,rismalrv/edx-platform,alexthered/kienhoc-platform,jamiefolsom/edx-platform,bitifirefly/edx-platform,wwj718/edx-platform,cyanna/edx-platform,chauhanhardik/populo,jazkarta/edx-platform,tanmaykm/edx-platform,stvstnfrd/edx-platform,PepperPD/edx-pepper-platform,jazztpt/edx-platform,nikolas/edx-platform,zerobatu/edx-platform,etzhou/edx-platform,arifsetiawan/edx-platform,peterm-itr/edx-platform,MSOpenTech/edx-platform,ESOedX/edx-platform,chand3040/cloud_that,jruiperezv/ANALYSE,lduarte1991/edx-platform,ESOedX/edx-platform,wwj718/ANALYSE,jbzdak/edx-platform,peterm-itr/edx-platform,mjg2203/edx-platform-seas,shurihell/testasia,romain-li/edx-platform,RPI-OPENEDX/edx-platform,Livit/Livit.Learn.EdX,jbassen/edx-platform,pku9104038/edx-platform,J861449197/edx-platform,mcgachey/edx-platform,arbrandes/edx-platform,PepperPD/edx-pepper-platform,kmoocdev/edx-platform,mjg2203/edx-platform-seas,y12uc231/edx-platform,benpatterson/edx-platform,kmoocdev/edx-platform,longmen21/edx-platform,pdehaye/theming-e
dx-platform,halvertoluke/edx-platform,jswope00/griffinx,nttks/edx-platform,don-github/edx-platform,Kalyzee/edx-platform,jbzdak/edx-platform,cognitiveclass/edx-platform,ovnicraft/edx-platform,eemirtekin/edx-platform,xuxiao19910803/edx,itsjeyd/edx-platform,JioEducation/edx-platform,syjeon/new_edx,eduNEXT/edx-platform,ubc/edx-platform,LICEF/edx-platform,dcosentino/edx-platform,CredoReference/edx-platform,OmarIthawi/edx-platform,hmcmooc/muddx-platform,EduPepperPD/pepper2013,RPI-OPENEDX/edx-platform,philanthropy-u/edx-platform,msegado/edx-platform,MSOpenTech/edx-platform,nanolearning/edx-platform,sudheerchintala/LearnEraPlatForm,beni55/edx-platform,devs1991/test_edx_docmode,zerobatu/edx-platform,ZLLab-Mooc/edx-platform,IndonesiaX/edx-platform,arbrandes/edx-platform,ZLLab-Mooc/edx-platform,louyihua/edx-platform,IndonesiaX/edx-platform,xinjiguaike/edx-platform,cselis86/edx-platform,hastexo/edx-platform,hamzehd/edx-platform,dcosentino/edx-platform,zadgroup/edx-platform,kamalx/edx-platform,nttks/jenkins-test,wwj718/edx-platform,mcgachey/edx-platform,rismalrv/edx-platform,Shrhawk/edx-platform,IndonesiaX/edx-platform,ampax/edx-platform-backup,morenopc/edx-platform,tanmaykm/edx-platform,dsajkl/123,mtlchun/edx,eduNEXT/edunext-platform,angelapper/edx-platform,beni55/edx-platform,beacloudgenius/edx-platform,dkarakats/edx-platform,amir-qayyum-khan/edx-platform,yokose-ks/edx-platform,morenopc/edx-platform,jbassen/edx-platform,pelikanchik/edx-platform,mtlchun/edx,hkawasaki/kawasaki-aio8-0,AkA84/edx-platform,kursitet/edx-platform,ferabra/edx-platform,hamzehd/edx-platform,ampax/edx-platform-backup,JCBarahona/edX,morpheby/levelup-by,rhndg/openedx,zhenzhai/edx-platform,jonathan-beard/edx-platform,rhndg/openedx,apigee/edx-platform,shabab12/edx-platform,Kalyzee/edx-platform,Endika/edx-platform,shubhdev/openedx,jazkarta/edx-platform,pomegranited/edx-platform,hkawasaki/kawasaki-aio8-0,chrisndodge/edx-platform,xingyepei/edx-platform,B-MOOC/edx-platform,kxliugang/edx-platform,polimediaupv/edx-
platform,teltek/edx-platform,andyzsf/edx,cognitiveclass/edx-platform,jamiefolsom/edx-platform,antoviaque/edx-platform,LICEF/edx-platform,edry/edx-platform,jjmiranda/edx-platform,ampax/edx-platform,mushtaqak/edx-platform,xuxiao19910803/edx,zadgroup/edx-platform,sudheerchintala/LearnEraPlatForm,mtlchun/edx,leansoft/edx-platform,alu042/edx-platform,deepsrijit1105/edx-platform,kxliugang/edx-platform,nttks/edx-platform,doganov/edx-platform,doganov/edx-platform,abdoosh00/edx-rtl-final,Stanford-Online/edx-platform,jamesblunt/edx-platform,jolyonb/edx-platform,motion2015/a3,appliedx/edx-platform,shurihell/testasia,pomegranited/edx-platform,playm2mboy/edx-platform,zubair-arbi/edx-platform,jamiefolsom/edx-platform,eestay/edx-platform,ubc/edx-platform,auferack08/edx-platform,yokose-ks/edx-platform,xingyepei/edx-platform,pku9104038/edx-platform,jazkarta/edx-platform-for-isc,cecep-edu/edx-platform,OmarIthawi/edx-platform,eestay/edx-platform,PepperPD/edx-pepper-platform,SivilTaram/edx-platform,Edraak/edx-platform,solashirai/edx-platform,ahmadio/edx-platform,devs1991/test_edx_docmode,praveen-pal/edx-platform,dcosentino/edx-platform,simbs/edx-platform,Livit/Livit.Learn.EdX,arifsetiawan/edx-platform,UXE/local-edx,cecep-edu/edx-platform,Semi-global/edx-platform,pomegranited/edx-platform,xuxiao19910803/edx-platform,nttks/jenkins-test,LearnEra/LearnEraPlaftform,Shrhawk/edx-platform,edx/edx-platform,kursitet/edx-platform,kmoocdev2/edx-platform,marcore/edx-platform,CourseTalk/edx-platform,franosincic/edx-platform,motion2015/a3,hamzehd/edx-platform,mitocw/edx-platform,leansoft/edx-platform,tiagochiavericosta/edx-platform,cyanna/edx-platform,Edraak/circleci-edx-platform,ak2703/edx-platform,wwj718/edx-platform,xinjiguaike/edx-platform,iivic/BoiseStateX,jswope00/griffinx,Edraak/edx-platform,beacloudgenius/edx-platform,chauhanhardik/populo_2,jzoldak/edx-platform,LICEF/edx-platform,shurihell/testasia,itsjeyd/edx-platform,franosincic/edx-platform,Edraak/edraak-platform,solashirai/edx-platform,hk
awasaki/kawasaki-aio8-2,pabloborrego93/edx-platform,doganov/edx-platform,romain-li/edx-platform,zofuthan/edx-platform,ovnicraft/edx-platform,mjg2203/edx-platform-seas,torchingloom/edx-platform,JCBarahona/edX,chauhanhardik/populo,vasyarv/edx-platform,TeachAtTUM/edx-platform,shubhdev/edxOnBaadal,Stanford-Online/edx-platform,jswope00/GAI,msegado/edx-platform,dsajkl/123,B-MOOC/edx-platform,kalebhartje/schoolboost,fly19890211/edx-platform,pelikanchik/edx-platform,praveen-pal/edx-platform,Semi-global/edx-platform,jbassen/edx-platform,hastexo/edx-platform,MSOpenTech/edx-platform,jswope00/griffinx,mcgachey/edx-platform,gymnasium/edx-platform,EduPepperPDTesting/pepper2013-testing,tanmaykm/edx-platform,knehez/edx-platform,longmen21/edx-platform,mitocw/edx-platform,LICEF/edx-platform,procangroup/edx-platform,pabloborrego93/edx-platform,jswope00/GAI,EduPepperPD/pepper2013,gymnasium/edx-platform,inares/edx-platform,caesar2164/edx-platform,kursitet/edx-platform,raccoongang/edx-platform,mitocw/edx-platform,cyanna/edx-platform,eduNEXT/edunext-platform,IONISx/edx-platform,ahmadio/edx-platform,beni55/edx-platform,syjeon/new_edx,miptliot/edx-platform,nikolas/edx-platform,synergeticsedx/deployment-wipro,rhndg/openedx,appliedx/edx-platform,nttks/jenkins-test,Semi-global/edx-platform,don-github/edx-platform,chudaol/edx-platform,EDUlib/edx-platform,olexiim/edx-platform,Edraak/circleci-edx-platform,mjirayu/sit_academy,UOMx/edx-platform,solashirai/edx-platform,edx/edx-platform,knehez/edx-platform,mtlchun/edx,franosincic/edx-platform,mushtaqak/edx-platform,zubair-arbi/edx-platform,utecuy/edx-platform,J861449197/edx-platform,kmoocdev2/edx-platform,mcgachey/edx-platform,MakeHer/edx-platform,alexthered/kienhoc-platform,motion2015/a3,RPI-OPENEDX/edx-platform,pepeportela/edx-platform,naresh21/synergetics-edx-platform,kamalx/edx-platform,ESOedX/edx-platform,IONISx/edx-platform,appliedx/edx-platform,IONISx/edx-platform,bitifirefly/edx-platform,EduPepperPDTesting/pepper2013-testing,DefyVentures/edx-
platform,analyseuc3m/ANALYSE-v1,polimediaupv/edx-platform,martynovp/edx-platform,rue89-tech/edx-platform,bdero/edx-platform,nanolearning/edx-platform,pepeportela/edx-platform,antonve/s4-project-mooc,caesar2164/edx-platform,ahmedaljazzar/edx-platform,jolyonb/edx-platform,praveen-pal/edx-platform,utecuy/edx-platform,MakeHer/edx-platform,miptliot/edx-platform,Softmotions/edx-platform,olexiim/edx-platform,olexiim/edx-platform,longmen21/edx-platform,waheedahmed/edx-platform,rationalAgent/edx-platform-custom,lduarte1991/edx-platform,dsajkl/123,Edraak/edraak-platform,devs1991/test_edx_docmode,kxliugang/edx-platform,itsjeyd/edx-platform,jruiperezv/ANALYSE,motion2015/edx-platform,playm2mboy/edx-platform,carsongee/edx-platform,shashank971/edx-platform,jonathan-beard/edx-platform,4eek/edx-platform,SivilTaram/edx-platform,ak2703/edx-platform,antoviaque/edx-platform,philanthropy-u/edx-platform,unicri/edx-platform,gymnasium/edx-platform,EduPepperPD/pepper2013,hamzehd/edx-platform,cognitiveclass/edx-platform,DNFcode/edx-platform,ahmadio/edx-platform,shashank971/edx-platform,msegado/edx-platform,motion2015/edx-platform,mahendra-r/edx-platform,sudheerchintala/LearnEraPlatForm,Unow/edx-platform,marcore/edx-platform,inares/edx-platform,kmoocdev/edx-platform,sameetb-cuelogic/edx-platform-test,fintech-circle/edx-platform,olexiim/edx-platform,devs1991/test_edx_docmode,CredoReference/edx-platform,pdehaye/theming-edx-platform,pelikanchik/edx-platform,ubc/edx-platform,praveen-pal/edx-platform,Ayub-Khan/edx-platform,apigee/edx-platform,MakeHer/edx-platform,beacloudgenius/edx-platform,Softmotions/edx-platform,kmoocdev/edx-platform,martynovp/edx-platform,abdoosh00/edraak,alu042/edx-platform,zadgroup/edx-platform,jzoldak/edx-platform,jamiefolsom/edx-platform,EDUlib/edx-platform,shubhdev/edxOnBaadal,IITBinterns13/edx-platform-dev,motion2015/edx-platform,benpatterson/edx-platform,vikas1885/test1,cpennington/edx-platform,Lektorium-LLC/edx-platform,jbzdak/edx-platform,jazztpt/edx-platform,Shrhawk/e
dx-platform,dcosentino/edx-platform,proversity-org/edx-platform,zubair-arbi/edx-platform,xuxiao19910803/edx-platform,hamzehd/edx-platform,hkawasaki/kawasaki-aio8-2,bdero/edx-platform,EDUlib/edx-platform,martynovp/edx-platform,jolyonb/edx-platform,UOMx/edx-platform,nanolearning/edx-platform,bitifirefly/edx-platform,itsjeyd/edx-platform,beacloudgenius/edx-platform,antoviaque/edx-platform,hkawasaki/kawasaki-aio8-1,iivic/BoiseStateX,cselis86/edx-platform,zhenzhai/edx-platform,B-MOOC/edx-platform,JioEducation/edx-platform,Unow/edx-platform,apigee/edx-platform,cognitiveclass/edx-platform,kalebhartje/schoolboost,arifsetiawan/edx-platform,waheedahmed/edx-platform,raccoongang/edx-platform,rue89-tech/edx-platform,xinjiguaike/edx-platform,JCBarahona/edX,nttks/jenkins-test,pdehaye/theming-edx-platform,WatanabeYasumasa/edx-platform,bitifirefly/edx-platform,kalebhartje/schoolboost,mjirayu/sit_academy,olexiim/edx-platform,SravanthiSinha/edx-platform,nagyistoce/edx-platform,shurihell/testasia,eemirtekin/edx-platform,valtech-mooc/edx-platform,abdoosh00/edx-rtl-final,shashank971/edx-platform,tanmaykm/edx-platform,ZLLab-Mooc/edx-platform,morenopc/edx-platform,shubhdev/edxOnBaadal,nagyistoce/edx-platform,teltek/edx-platform,hkawasaki/kawasaki-aio8-0,wwj718/edx-platform,OmarIthawi/edx-platform,utecuy/edx-platform,JioEducation/edx-platform,philanthropy-u/edx-platform,unicri/edx-platform,simbs/edx-platform,Unow/edx-platform,beacloudgenius/edx-platform,pku9104038/edx-platform,hastexo/edx-platform,torchingloom/edx-platform,chauhanhardik/populo,beni55/edx-platform,andyzsf/edx,teltek/edx-platform,martynovp/edx-platform,eduNEXT/edx-platform,chudaol/edx-platform,DefyVentures/edx-platform,nanolearning/edx-platform,ampax/edx-platform,pepeportela/edx-platform,synergeticsedx/deployment-wipro,WatanabeYasumasa/edx-platform,PepperPD/edx-pepper-platform,auferack08/edx-platform,10clouds/edx-platform,simbs/edx-platform,a-parhom/edx-platform,xinjiguaike/edx-platform,louyihua/edx-platform,etzhou/edx-platfo
rm,ahmadiga/min_edx,vasyarv/edx-platform,hmcmooc/muddx-platform,doismellburning/edx-platform,EduPepperPDTesting/pepper2013-testing,mushtaqak/edx-platform,chauhanhardik/populo_2,DefyVentures/edx-platform,ampax/edx-platform,chrisndodge/edx-platform,edx/edx-platform,edx-solutions/edx-platform,mjirayu/sit_academy,Shrhawk/edx-platform,hkawasaki/kawasaki-aio8-1,chauhanhardik/populo,IITBinterns13/edx-platform-dev,IONISx/edx-platform,SravanthiSinha/edx-platform,pelikanchik/edx-platform,jazkarta/edx-platform,jelugbo/tundex,synergeticsedx/deployment-wipro,vismartltd/edx-platform,carsongee/edx-platform,Kalyzee/edx-platform,naresh21/synergetics-edx-platform,adoosii/edx-platform,ahmedaljazzar/edx-platform,cpennington/edx-platform,zofuthan/edx-platform,JCBarahona/edX,chrisndodge/edx-platform,OmarIthawi/edx-platform,ubc/edx-platform,chand3040/cloud_that,Edraak/circleci-edx-platform,bitifirefly/edx-platform,proversity-org/edx-platform,cecep-edu/edx-platform,LICEF/edx-platform,analyseuc3m/ANALYSE-v1,eestay/edx-platform,TsinghuaX/edx-platform,TsinghuaX/edx-platform,B-MOOC/edx-platform,xuxiao19910803/edx,edry/edx-platform,appliedx/edx-platform,atsolakid/edx-platform,Edraak/circleci-edx-platform,don-github/edx-platform,kursitet/edx-platform,bdero/edx-platform,playm2mboy/edx-platform,motion2015/edx-platform,antonve/s4-project-mooc,motion2015/edx-platform,Ayub-Khan/edx-platform,RPI-OPENEDX/edx-platform,appsembler/edx-platform,abdoosh00/edraak,don-github/edx-platform,lduarte1991/edx-platform,kmoocdev2/edx-platform,rismalrv/edx-platform,waheedahmed/edx-platform,caesar2164/edx-platform,nikolas/edx-platform,nanolearningllc/edx-platform-cypress-2,shabab12/edx-platform,analyseuc3m/ANALYSE-v1,DNFcode/edx-platform,fly19890211/edx-platform,zubair-arbi/edx-platform,mcgachey/edx-platform,vikas1885/test1,valtech-mooc/edx-platform,benpatterson/edx-platform,nanolearningllc/edx-platform-cypress,nttks/edx-platform,jonathan-beard/edx-platform,jamesblunt/edx-platform,caesar2164/edx-platform,a-parhom/edx-p
latform,chauhanhardik/populo_2,jbzdak/edx-platform,Lektorium-LLC/edx-platform,xingyepei/edx-platform,iivic/BoiseStateX,jjmiranda/edx-platform,gsehub/edx-platform,longmen21/edx-platform,stvstnfrd/edx-platform,atsolakid/edx-platform,dsajkl/reqiop,CourseTalk/edx-platform,etzhou/edx-platform,IndonesiaX/edx-platform,amir-qayyum-khan/edx-platform,synergeticsedx/deployment-wipro,Semi-global/edx-platform,DefyVentures/edx-platform,rismalrv/edx-platform,mushtaqak/edx-platform,sameetb-cuelogic/edx-platform-test,mbareta/edx-platform-ft,JCBarahona/edX,ovnicraft/edx-platform,Stanford-Online/edx-platform,jamesblunt/edx-platform,nagyistoce/edx-platform,B-MOOC/edx-platform,simbs/edx-platform,vikas1885/test1,ZLLab-Mooc/edx-platform,torchingloom/edx-platform,alexthered/kienhoc-platform,nttks/edx-platform,marcore/edx-platform,jazkarta/edx-platform-for-isc,xuxiao19910803/edx,leansoft/edx-platform,ahmadio/edx-platform,ZLLab-Mooc/edx-platform,utecuy/edx-platform,Endika/edx-platform,BehavioralInsightsTeam/edx-platform,jzoldak/edx-platform,eduNEXT/edx-platform,mahendra-r/edx-platform,adoosii/edx-platform,dsajkl/123,inares/edx-platform,EDUlib/edx-platform,jswope00/griffinx,rhndg/openedx,motion2015/a3,edx/edx-platform,EduPepperPD/pepper2013,UXE/local-edx,benpatterson/edx-platform,jjmiranda/edx-platform,jazztpt/edx-platform,waheedahmed/edx-platform,gymnasium/edx-platform,edry/edx-platform,kalebhartje/schoolboost,nanolearning/edx-platform,Kalyzee/edx-platform,morenopc/edx-platform,bigdatauniversity/edx-platform,alu042/edx-platform,xuxiao19910803/edx-platform,rue89-tech/edx-platform,fly19890211/edx-platform,pabloborrego93/edx-platform,eemirtekin/edx-platform,halvertoluke/edx-platform,louyihua/edx-platform,TsinghuaX/edx-platform,kmoocdev2/edx-platform,CourseTalk/edx-platform,zerobatu/edx-platform,tiagochiavericosta/edx-platform,Unow/edx-platform,devs1991/test_edx_docmode,MakeHer/edx-platform,ESOedX/edx-platform,miptliot/edx-platform,4eek/edx-platform,Edraak/edraak-platform,gsehub/edx-platform,rom
ain-li/edx-platform,knehez/edx-platform,pepeportela/edx-platform,MakeHer/edx-platform,unicri/edx-platform,halvertoluke/edx-platform,ampax/edx-platform-backup,openfun/edx-platform,nanolearningllc/edx-platform-cypress-2,syjeon/new_edx,ahmadiga/min_edx,raccoongang/edx-platform,don-github/edx-platform,playm2mboy/edx-platform,chand3040/cloud_that,inares/edx-platform,cpennington/edx-platform,vismartltd/edx-platform,Shrhawk/edx-platform,leansoft/edx-platform,IITBinterns13/edx-platform-dev,ahmadio/edx-platform,shashank971/edx-platform,ak2703/edx-platform,longmen21/edx-platform,msegado/edx-platform,sameetb-cuelogic/edx-platform-test,y12uc231/edx-platform,gsehub/edx-platform,eemirtekin/edx-platform,mjg2203/edx-platform-seas,andyzsf/edx,nanolearningllc/edx-platform-cypress,DefyVentures/edx-platform,shubhdev/edxOnBaadal,abdoosh00/edraak,shurihell/testasia,tiagochiavericosta/edx-platform,zofuthan/edx-platform,jelugbo/tundex,hastexo/edx-platform,nikolas/edx-platform,appliedx/edx-platform,doismellburning/edx-platform,auferack08/edx-platform,kxliugang/edx-platform,chauhanhardik/populo,morenopc/edx-platform,msegado/edx-platform,knehez/edx-platform,unicri/edx-platform,doismellburning/edx-platform,jamesblunt/edx-platform,vismartltd/edx-platform,alexthered/kienhoc-platform,prarthitm/edxplatform,shubhdev/edx-platform,adoosii/edx-platform,beni55/edx-platform,miptliot/edx-platform,shubhdev/openedx,cecep-edu/edx-platform,polimediaupv/edx-platform,Edraak/edx-platform,nagyistoce/edx-platform,zerobatu/edx-platform,eestay/edx-platform,xuxiao19910803/edx-platform,hmcmooc/muddx-platform,jolyonb/edx-platform,cselis86/edx-platform,defance/edx-platform,antonve/s4-project-mooc,procangroup/edx-platform,ampax/edx-platform-backup,dcosentino/edx-platform,kxliugang/edx-platform,jbassen/edx-platform,polimediaupv/edx-platform,auferack08/edx-platform,romain-li/edx-platform,zhenzhai/edx-platform,andyzsf/edx,Livit/Livit.Learn.EdX,zhenzhai/edx-platform,wwj718/edx-platform,shashank971/edx-platform,edry/edx-plat
form,hkawasaki/kawasaki-aio8-2,edx-solutions/edx-platform,deepsrijit1105/edx-platform,10clouds/edx-platform,UOMx/edx-platform,nanolearningllc/edx-platform-cypress,procangroup/edx-platform,SravanthiSinha/edx-platform,rationalAgent/edx-platform-custom,jelugbo/tundex,marcore/edx-platform,franosincic/edx-platform,chrisndodge/edx-platform,jswope00/griffinx,appsembler/edx-platform,zadgroup/edx-platform,4eek/edx-platform,carsongee/edx-platform,Kalyzee/edx-platform,Edraak/edx-platform,zofuthan/edx-platform,bigdatauniversity/edx-platform,DNFcode/edx-platform,arifsetiawan/edx-platform,ahmadiga/min_edx,Softmotions/edx-platform,AkA84/edx-platform,simbs/edx-platform,leansoft/edx-platform,vismartltd/edx-platform,devs1991/test_edx_docmode,zofuthan/edx-platform,LearnEra/LearnEraPlaftform,BehavioralInsightsTeam/edx-platform,dsajkl/reqiop,hkawasaki/kawasaki-aio8-2,eduNEXT/edunext-platform,chand3040/cloud_that,IITBinterns13/edx-platform-dev,arifsetiawan/edx-platform,mahendra-r/edx-platform,chudaol/edx-platform,fintech-circle/edx-platform,pomegranited/edx-platform,etzhou/edx-platform,kmoocdev/edx-platform,Softmotions/edx-platform,mahendra-r/edx-platform,DNFcode/edx-platform,waheedahmed/edx-platform,MSOpenTech/edx-platform,ahmadiga/min_edx,Stanford-Online/edx-platform,yokose-ks/edx-platform,Lektorium-LLC/edx-platform,TsinghuaX/edx-platform,edx-solutions/edx-platform,ampax/edx-platform,jzoldak/edx-platform,valtech-mooc/edx-platform,SivilTaram/edx-platform,stvstnfrd/edx-platform,nikolas/edx-platform,nanolearningllc/edx-platform-cypress-2,cyanna/edx-platform,nanolearningllc/edx-platform-cypress-2,EduPepperPDTesting/pepper2013-testing,procangroup/edx-platform,ferabra/edx-platform,jruiperezv/ANALYSE,rhndg/openedx,vasyarv/edx-platform,10clouds/edx-platform,jonathan-beard/edx-platform,dsajkl/123,bdero/edx-platform,adoosii/edx-platform,arbrandes/edx-platform,ovnicraft/edx-platform,ahmedaljazzar/edx-platform,abdoosh00/edx-rtl-final,dsajkl/reqiop,rue89-tech/edx-platform,morpheby/levelup-by,fly198
90211/edx-platform,rue89-tech/edx-platform,a-parhom/edx-platform,mbareta/edx-platform-ft,LearnEra/LearnEraPlaftform,eemirtekin/edx-platform,dkarakats/edx-platform,J861449197/edx-platform,EduPepperPDTesting/pepper2013-testing,jelugbo/tundex,chauhanhardik/populo_2,mbareta/edx-platform-ft,playm2mboy/edx-platform,a-parhom/edx-platform,cpennington/edx-platform,arbrandes/edx-platform,bigdatauniversity/edx-platform,ferabra/edx-platform,UXE/local-edx,jswope00/GAI,ak2703/edx-platform,JioEducation/edx-platform,nttks/jenkins-test,EduPepperPD/pepper2013,antonve/s4-project-mooc,pku9104038/edx-platform,pabloborrego93/edx-platform,SravanthiSinha/edx-platform,xinjiguaike/edx-platform,amir-qayyum-khan/edx-platform,defance/edx-platform,jonathan-beard/edx-platform,mjirayu/sit_academy,antoviaque/edx-platform,abdoosh00/edx-rtl-final,LearnEra/LearnEraPlaftform,devs1991/test_edx_docmode,appsembler/edx-platform,10clouds/edx-platform,benpatterson/edx-platform,SivilTaram/edx-platform,morpheby/levelup-by,MSOpenTech/edx-platform,hkawasaki/kawasaki-aio8-0,UOMx/edx-platform,vasyarv/edx-platform,lduarte1991/edx-platform,CredoReference/edx-platform,eduNEXT/edunext-platform,UXE/local-edx,stvstnfrd/edx-platform,sudheerchintala/LearnEraPlatForm,chudaol/edx-platform,antonve/s4-project-mooc,atsolakid/edx-platform,Ayub-Khan/edx-platform,halvertoluke/edx-platform,jazkarta/edx-platform-for-isc,mushtaqak/edx-platform,vasyarv/edx-platform,Endika/edx-platform,motion2015/a3,dsajkl/reqiop,hmcmooc/muddx-platform,deepsrijit1105/edx-platform,WatanabeYasumasa/edx-platform,jazkarta/edx-platform-for-isc,zadgroup/edx-platform,franosincic/edx-platform,etzhou/edx-platform,nanolearningllc/edx-platform-cypress-2,carsongee/edx-platform,shabab12/edx-platform,alu042/edx-platform,bigdatauniversity/edx-platform,xuxiao19910803/edx,TeachAtTUM/edx-platform,Endika/edx-platform,martynovp/edx-platform,shubhdev/edx-platform,SivilTaram/edx-platform,mitocw/edx-platform,jjmiranda/edx-platform,openfun/edx-platform,CourseTalk/edx-platfor
m,eduNEXT/edx-platform,openfun/edx-platform,ahmadiga/min_edx,gsehub/edx-platform,angelapper/edx-platform,AkA84/edx-platform,nanolearningllc/edx-platform-cypress,rationalAgent/edx-platform-custom,utecuy/edx-platform,dkarakats/edx-platform,openfun/edx-platform,jazkarta/edx-platform,kalebhartje/schoolboost,mahendra-r/edx-platform,cognitiveclass/edx-platform,rismalrv/edx-platform,BehavioralInsightsTeam/edx-platform,devs1991/test_edx_docmode,kursitet/edx-platform,edry/edx-platform,peterm-itr/edx-platform,cselis86/edx-platform,prarthitm/edxplatform,valtech-mooc/edx-platform,polimediaupv/edx-platform,wwj718/ANALYSE,atsolakid/edx-platform,jbassen/edx-platform,IndonesiaX/edx-platform,iivic/BoiseStateX,SravanthiSinha/edx-platform,syjeon/new_edx,romain-li/edx-platform,vikas1885/test1,sameetb-cuelogic/edx-platform-test,angelapper/edx-platform,Lektorium-LLC/edx-platform,BehavioralInsightsTeam/edx-platform,yokose-ks/edx-platform,J861449197/edx-platform,alexthered/kienhoc-platform,zerobatu/edx-platform,ferabra/edx-platform,naresh21/synergetics-edx-platform,apigee/edx-platform,louyihua/edx-platform,nagyistoce/edx-platform | ---
+++
@@ -25,4 +25,4 @@
del self._session[tuple(key)]
def has(self, key):
- return key in self._descriptor_model_data or key in self._session
+ return key.field_name in self._descriptor_model_data or tuple(key) in self._session |
4a67ee6df306fa7907ef76647446e46ae1bfea99 | erudite/components/knowledge_provider.py | erudite/components/knowledge_provider.py | """
Knowledge provider that will respond to requests made by the rdf publisher or another bot.
"""
from sleekxmpp.plugins.base import base_plugin
from rhobot.components.storage.client import StoragePayload
from rdflib.namespace import FOAF
import logging
logger = logging.getLogger(__name__)
class KnowledgeProvider(base_plugin):
    """Answer RDF knowledge requests issued by the rdf publisher or other bots."""

    name = 'knowledge_provider'
    description = 'Knowledge Provider'
    dependencies = {'rho_bot_storage_client', 'rho_bot_rdf_publish', }
    # The node types this provider is able to answer requests about.
    type_requirements = {str(FOAF.Person), 'rho::owner', }

    def plugin_init(self):
        pass

    def post_init(self):
        base_plugin.post_init(self)
        # Route incoming RDF request messages to our handler.
        self.xmpp['rho_bot_rdf_publish'].add_message_handler(self._rdf_request_message)

    def _rdf_request_message(self, rdf_payload):
        """Return a populated result form for the request, or None if unanswerable."""
        logger.info('Looking up knowledge')
        payload = StoragePayload(rdf_payload['form'])
        requested_types = payload.types()
        supported = self.type_requirements.intersection(set(requested_types))
        # Only answer when every requested type is one we can provide.
        if len(supported) == len(requested_types):
            results = self.xmpp['rho_bot_storage_client'].find_nodes(payload)
            result_form = results['command']['form']
            if len(result_form.get_items()):
                return result_form
        return None


knowledge_provider = KnowledgeProvider
| """
Knowledge provider that will respond to requests made by the rdf publisher or another bot.
"""
from sleekxmpp.plugins.base import base_plugin
from rhobot.components.storage.client import StoragePayload
from rdflib.namespace import FOAF
from rhobot.namespace import RHO
import logging
logger = logging.getLogger(__name__)
class KnowledgeProvider(base_plugin):
    """Respond to RDF knowledge requests from the rdf publisher or another bot."""

    name = 'knowledge_provider'
    description = 'Knowledge Provider'
    dependencies = {'rho_bot_storage_client', 'rho_bot_rdf_publish', }
    # The node types this provider can serve (string form of the URIs).
    type_requirements = {str(FOAF.Person), str(RHO.Owner), }

    def plugin_init(self):
        pass

    def post_init(self):
        base_plugin.post_init(self)
        # Subscribe our handler to incoming RDF request messages.
        self.xmpp['rho_bot_rdf_publish'].add_message_handler(self._rdf_request_message)

    def _rdf_request_message(self, rdf_payload):
        """Return a non-empty result form for the request, or None."""
        logger.info('Looking up knowledge')
        payload = StoragePayload(rdf_payload['form'])
        requested_types = payload.types()
        matched = self.type_requirements.intersection(set(requested_types))
        # Answer only if every requested type is within our capabilities.
        if len(matched) == len(requested_types):
            results = self.xmpp['rho_bot_storage_client'].find_nodes(payload)
            result_form = results['command']['form']
            if len(result_form.get_items()):
                return result_form
        return None


knowledge_provider = KnowledgeProvider
| Use string of Rho Namespace | Use string of Rho Namespace
Instead of the URI object, the string representation needs to be used.
| Python | bsd-3-clause | rerobins/rho_erudite | ---
+++
@@ -4,6 +4,7 @@
from sleekxmpp.plugins.base import base_plugin
from rhobot.components.storage.client import StoragePayload
from rdflib.namespace import FOAF
+from rhobot.namespace import RHO
import logging
logger = logging.getLogger(__name__)
@@ -14,7 +15,7 @@
description = 'Knowledge Provider'
dependencies = {'rho_bot_storage_client', 'rho_bot_rdf_publish', }
- type_requirements = {str(FOAF.Person), 'rho::owner', }
+ type_requirements = {str(FOAF.Person), str(RHO.Owner), }
def plugin_init(self):
pass |
154659f2126cd83e0a52af3d1a84620ca5c52409 | examples/generic_lpu/visualize_output.py | examples/generic_lpu/visualize_output.py |
import numpy as np
import neurokernel.LPU.utils.visualizer as vis
import networkx as nx
nx.readwrite.gexf.GEXF.convert_bool = {'false':False, 'False':False,
'true':True, 'True':True}
G = nx.read_gexf('./data/generic_lpu.gexf.gz')
neu_out = [k for k,n in G.node.items() if n['name'][:3] == 'out']
V = vis.visualizer()
V.add_LPU('./data/generic_input.h5', LPU='Sensory')
V.add_plot({'type':'waveform','ids':[[0]]}, 'input_Sensory')
V.add_LPU('generic_output_spike.h5', './data/generic_lpu.gexf.gz','Generic LPU')
V.add_plot({'type':'raster','ids':{0:range(48,83)},
'yticks':range(1,1+len(neu_out)),'yticklabels':range(len(neu_out))},
'Generic LPU','Output')
V._update_interval = 50
V.rows = 2
V.cols = 1
V.fontsize = 18
V.out_filename = 'output_generic.avi'
V.codec = 'libtheora'
V.dt = 0.0001
V.xlim = [0,1.0]
V.run()
|
import numpy as np
import matplotlib as mpl
mpl.use('agg')
import neurokernel.LPU.utils.visualizer as vis
import networkx as nx
nx.readwrite.gexf.GEXF.convert_bool = {'false':False, 'False':False,
'true':True, 'True':True}
G = nx.read_gexf('./data/generic_lpu.gexf.gz')
neu_out = [k for k,n in G.node.items() if n['name'][:3] == 'out']
V = vis.visualizer()
V.add_LPU('./data/generic_input.h5', LPU='Sensory')
V.add_plot({'type':'waveform','ids':[[0]]}, 'input_Sensory')
V.add_LPU('generic_output_spike.h5', './data/generic_lpu.gexf.gz','Generic LPU')
V.add_plot({'type':'raster','ids':{0:range(48,83)},
'yticks':range(1,1+len(neu_out)),'yticklabels':range(len(neu_out))},
'Generic LPU','Output')
V._update_interval = 50
V.rows = 2
V.cols = 1
V.fontsize = 18
V.out_filename = 'output_generic.avi'
V.codec = 'libtheora'
V.dt = 0.0001
V.xlim = [0,1.0]
V.run()
| Use AGG backend for generic visualization to avoid display dependency. | Use AGG backend for generic visualization to avoid display dependency.
| Python | bsd-3-clause | cerrno/neurokernel | ---
+++
@@ -1,5 +1,8 @@
import numpy as np
+import matplotlib as mpl
+mpl.use('agg')
+
import neurokernel.LPU.utils.visualizer as vis
import networkx as nx
|
d38f36e0b783c1a1f72ead46b90a982fcbe34488 | tools/conan/conanfile.py | tools/conan/conanfile.py | from conans import ConanFile, CMake, tools
class LibWFUTConan(ConanFile):
    """Conan recipe for libwfut, the WorldForge Update Tool client library."""

    name = "libwfut"
    version = "0.2.4"
    license = "GPL-2.0+"
    author = "Erik Ogenvik <erik@ogenvik.org>"
    homepage = "https://www.worldforge.org"
    url = "https://github.com/worldforge/libwfut"
    description = "A client side C++ implementation of WFUT (WorldForge Update Tool)."
    topics = ("mmorpg", "worldforge")

    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [False, True], "fPIC": [True, False]}
    default_options = {"shared": False, "fPIC": True}
    generators = "cmake"

    requires = ["sigc++/2.10.0@worldforge/stable",
                "zlib/1.2.11",
                "libcurl/7.66.0@worldforge/stable"]

    # Sources come straight from git; "auto" lets Conan capture the
    # current revision when the recipe is exported.
    scm = {
        "type": "git",
        "url": "https://github.com/worldforge/libwfut.git",
        "revision": "auto"
    }

    def build(self):
        """Configure, build and install the project with CMake."""
        cmake = CMake(self)
        cmake.configure(source_folder=".")
        cmake.build()
        cmake.install()

    def package_info(self):
        """Advertise the built libraries and the versioned include directory."""
        self.cpp_info.libs = tools.collect_libs(self)
        self.cpp_info.includedirs = ["include/wfut-0.2"]

    def package(self):
        # Files are already laid out by cmake.install() during build().
        pass
| from conans import ConanFile, CMake, tools
class LibWFUTConan(ConanFile):
    """Conan package recipe for libwfut, the client-side C++ implementation
    of WFUT (WorldForge Update Tool).
    """
    name = "libwfut"
    version = "0.2.4"
    license = "GPL-2.0+"
    author = "Erik Ogenvik <erik@ogenvik.org>"
    homepage = "https://www.worldforge.org"
    url = "https://github.com/worldforge/libwfut"
    description = "A client side C++ implementation of WFUT (WorldForge Update Tool)."
    topics = ("mmorpg", "worldforge")
    settings = "os", "compiler", "build_type", "arch"
    options = {"shared": [False, True], "fPIC": [True, False]}
    default_options = {"shared": False, "fPIC": True}
    generators = "cmake"
    # libcurl and zlib come from Conan Center; sigc++ is still served from
    # the worldforge channel.
    requires = ["sigc++/2.10.0@worldforge/stable",
                "zlib/1.2.11",
                "libcurl/7.75.0"]
    # Sources are captured from the git checkout the recipe is exported from.
    scm = {
        "type": "git",
        "url": "https://github.com/worldforge/libwfut.git",
        "revision": "auto"
    }

    def build(self):
        # Configure, build and install in a single step via CMake.
        cmake = CMake(self)
        cmake.configure(source_folder=".")
        cmake.build()
        cmake.install()

    def package_info(self):
        # Expose every built library plus the versioned include directory.
        self.cpp_info.libs = tools.collect_libs(self)
        self.cpp_info.includedirs = ["include/wfut-0.2"]

    def package(self):
        # Nothing to do: installation is handled by cmake.install() in build().
        pass
| Use Conan Center provided libcurl. | Use Conan Center provided libcurl.
| Python | lgpl-2.1 | worldforge/libwfut,worldforge/libwfut,worldforge/libwfut,worldforge/libwfut | ---
+++
@@ -16,7 +16,7 @@
generators = "cmake"
requires = ["sigc++/2.10.0@worldforge/stable",
"zlib/1.2.11",
- "libcurl/7.66.0@worldforge/stable"]
+ "libcurl/7.75.0"]
scm = {
"type": "git", |
68287347cacae6a7836698cfb11523c8819628c9 | virustotal/vt.py | virustotal/vt.py | from threading import Semaphore
from os.path import basename
from rate import ratelimiter
import requests
class VirusTotal():
def __init__(self, apikey, limit=4, every=60):
self.semaphore = Semaphore(limit)
self.apikey = apikey
self.every = every
def scan(self, path):
with ratelimiter(self.semaphore, self.every):
with open(path, 'rb') as f:
params = {'apikey': self.apikey}
files = {'file': (basename(path), f)}
url = 'https://www.virustotal.com/vtapi/v2/file/scan'
res = requests.post(url, files=files, params=params)
return res.json()['resource']
def get_report(self, resource):
with ratelimiter(self.semaphore, self.every):
params = {'apikey': apikey, 'resource': resource}
url = 'https://www.virustotal.com/vtapi/v2/file/report'
return requests.get(url, params=params).json()
def get_num_detected(self, resource):
report = self.get_report(resource)
return sum(scan['detected'] for av, scan in report['scans'].items())
def get_percent_detected(self, resource):
report = self.get_report(resource)
return (sum(scan['detected'] for av, scan in report['scans'].items()) /
float(report['scans']))
| from threading import Semaphore
from os.path import basename
from rate import ratelimiter
import requests
class VirusTotal():
    """Minimal client for the VirusTotal v2 file API.

    Requests are throttled through a semaphore that is released after
    ``every`` seconds, matching the public API quota (default: 4 requests
    per minute).
    """

    def __init__(self, apikey, limit=4, every=60):
        # Bug fix: this previously read ``threading.Semaphore`` although only
        # ``Semaphore`` is imported, raising NameError on construction.
        self.semaphore = Semaphore(limit)
        self.apikey = apikey
        self.every = every

    def scan(self, path):
        """Upload the file at ``path`` for scanning; return its resource id."""
        with ratelimiter(self.semaphore, self.every):
            with open(path, 'rb') as f:
                params = {'apikey': self.apikey}
                files = {'file': (basename(path), f)}
                url = 'https://www.virustotal.com/vtapi/v2/file/scan'
                res = requests.post(url, files=files, params=params)
                return res.json()['resource']

    def get_report(self, resource):
        """Fetch and return the JSON scan report for ``resource``."""
        with ratelimiter(self.semaphore, self.every):
            params = {'apikey': self.apikey, 'resource': resource}
            url = 'https://www.virustotal.com/vtapi/v2/file/report'
            return requests.get(url, params=params).json()

    def get_num_detected(self, resource):
        """Return how many antivirus engines flagged ``resource``."""
        report = self.get_report(resource)
        return sum(scan['detected'] for av, scan in report['scans'].items())

    def get_percent_detected(self, resource):
        """Return the fraction (0..1) of engines that flagged ``resource``."""
        report = self.get_report(resource)
        detected = sum(scan['detected'] for av, scan in report['scans'].items())
        # Bug fix: previously divided by the ``scans`` dict itself, which
        # raises TypeError; divide by the number of engines instead.
        return detected / float(len(report['scans']))
| Add apikey to get_report api | Add apikey to get_report api
| Python | mit | enricobacis/playscraper | ---
+++
@@ -7,7 +7,7 @@
class VirusTotal():
def __init__(self, apikey, limit=4, every=60):
- self.semaphore = Semaphore(limit)
+ self.semaphore = threading.Semaphore(limit)
self.apikey = apikey
self.every = every
@@ -22,7 +22,7 @@
def get_report(self, resource):
with ratelimiter(self.semaphore, self.every):
- params = {'apikey': apikey, 'resource': resource}
+ params = {'apikey': self.apikey, 'resource': resource}
url = 'https://www.virustotal.com/vtapi/v2/file/report'
return requests.get(url, params=params).json()
|
b2adc19e5f28c16bc7cfcd38ed35043d7b1fbe29 | profile_collection/startup/15-machines.py | profile_collection/startup/15-machines.py | from ophyd import PVPositionerPC, EpicsSignal, EpicsSignalRO
from ophyd import Component as Cpt
# Undulator
class Undulator(PVPositionerPC):
readback = Cpt(EpicsSignalRO, '-LEnc}Gap')
setpoint = Cpt(EpicsSignal, '-Mtr:2}Inp:Pos')
actuate = Cpt(EpicsSignal, '-Mtr:2}Sw:Go')
actuate_value = 1
stop_signal = Cpt(EpicsSignal, '-Mtr:2}Pos.STOP')
stop_value = 1
ivu_gap = Undulator('SR:C11-ID:G1{IVU20:1', name='ivu_gap')
ivu_gap.readback = 'ivu_gap'
# This class is defined in 10-optics.py
fe = VirtualMotorCenterAndGap('FE:C11A-OP{Slt:12', name='fe') # Front End Slits (Primary Slits)
| from ophyd import PVPositionerPC, EpicsSignal, EpicsSignalRO
from ophyd import Component as Cpt
# Undulator
class Undulator(PVPositionerPC):
    """Insertion-device gap positioner driven through EPICS PVs.

    The gap readback comes from the linear encoder; a move is requested by
    writing the target position to ``setpoint`` and then writing
    ``actuate_value`` to ``actuate``.
    """
    readback = Cpt(EpicsSignalRO, '-LEnc}Gap')         # encoder gap readback
    setpoint = Cpt(EpicsSignal, '-Mtr:2}Inp:Pos')      # requested gap position
    actuate = Cpt(EpicsSignal, '-Mtr:2}Sw:Go')         # write actuate_value here to start a move
    actuate_value = 1
    stop_signal = Cpt(EpicsSignal, '-Mtr:2}Pos.STOP')  # write stop_value here to abort a move
    stop_value = 1
ivu_gap = Undulator('SR:C11-ID:G1{IVU20:1', name='ivu_gap')
# Do not assign to `ivu_gap.readback` directly (the signal object would be
# replaced by a plain string); rename the readback signal instead. This
# fixes a KeyError raised when a dscan result is saved to a SPEC file
# (Y.G., 2017-01-10).
ivu_gap.readback.name = 'ivu_gap'
# This class is defined in 10-optics.py
fe = VirtualMotorCenterAndGap('FE:C11A-OP{Slt:12', name='fe') # Front End Slits (Primary Slits)
| ADD readback.name in iuv_gap to fix speck save prob | ADD readback.name in iuv_gap to fix speck save prob
| Python | bsd-2-clause | NSLS-II-CHX/ipython_ophyd,NSLS-II-CHX/ipython_ophyd | ---
+++
@@ -12,7 +12,9 @@
stop_value = 1
ivu_gap = Undulator('SR:C11-ID:G1{IVU20:1', name='ivu_gap')
-ivu_gap.readback = 'ivu_gap'
+# ivu_gap.readback = 'ivu_gap' ####what the ^*(*()**)(* !!!!
+#To solve the "KeyError Problem" when doing dscan and trying to save to a spec file, Y.G., 20170110
+ivu_gap.readback.name = 'ivu_gap'
# This class is defined in 10-optics.py
fe = VirtualMotorCenterAndGap('FE:C11A-OP{Slt:12', name='fe') # Front End Slits (Primary Slits) |
d5b068b2efc5fca30014ac7b4d58123461bfbdc1 | djedi/utils/templates.py | djedi/utils/templates.py | import json
from django.core.exceptions import ImproperlyConfigured
from ..compat import NoReverseMatch, render, render_to_string, reverse
def render_embed(nodes=None, request=None):
context = {}
if nodes is None:
try:
prefix = request.build_absolute_uri("/").rstrip("/")
context.update(
{
"cms_url": prefix + reverse("admin:djedi:cms"),
"exclude_json_nodes": True,
}
)
output = render(request, "djedi/cms/embed.html", context)
except NoReverseMatch:
raise ImproperlyConfigured(
"Could not find djedi in your url conf, "
"include djedi.rest.urls within the djedi namespace."
)
else:
context.update(
{
"cms_url": reverse("admin:djedi:cms"),
"exclude_json_nodes": False,
"json_nodes": json.dumps(nodes).replace("</", "\\x3C/"),
}
)
output = render_to_string("djedi/cms/embed.html", context)
return output
| import json
from django.core.exceptions import ImproperlyConfigured
from ..compat import NoReverseMatch, render, render_to_string, reverse
def render_embed(nodes=None, request=None):
    """Render the djedi CMS embed template.

    When ``nodes`` is omitted, a full response is rendered for ``request``
    using an absolute CMS URL. Otherwise the template is rendered to a
    string with the given nodes inlined as JSON (``</`` is escaped so the
    payload is safe inside a ``<script>`` tag).
    """
    template = "djedi/cms/embed.html"

    if nodes is not None:
        payload = json.dumps(nodes).replace("</", "\\x3C/")
        context = {
            "cms_url": reverse("admin:djedi:cms"),
            "exclude_json_nodes": False,
            "json_nodes": payload,
        }
        return render_to_string(template, context)

    try:
        prefix = request.build_absolute_uri("/").rstrip("/")
        context = {
            "cms_url": prefix + reverse("admin:djedi:cms"),
            "exclude_json_nodes": True,
        }
        return render(request, template, context)
    except NoReverseMatch:
        raise ImproperlyConfigured(
            "Could not find djedi in your url conf, "
            "enable django admin or include "
            "djedi.urls within the admin namespace."
        )
| Update rest api url config error message | Update rest api url config error message
| Python | bsd-3-clause | 5monkeys/djedi-cms,5monkeys/djedi-cms,5monkeys/djedi-cms | ---
+++
@@ -21,7 +21,8 @@
except NoReverseMatch:
raise ImproperlyConfigured(
"Could not find djedi in your url conf, "
- "include djedi.rest.urls within the djedi namespace."
+ "enable django admin or include "
+ "djedi.urls within the admin namespace."
)
else: |
248fda4f499375b24a2f670569259f0904948b7e | troposphere/detective.py | troposphere/detective.py | # Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
class Graph(AWSObject):
resource_type = "AWS::Detective::Graph"
props = {}
class MemberInvitation(AWSObject):
resource_type = "AWS::Detective::MemberInvitation"
props = {
"DisableEmailNotification": (boolean, False),
"GraphArn": (str, True),
"MemberEmailAddress": (str, True),
"MemberId": (str, True),
"Message": (str, False),
}
| # Copyright (c) 2020, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, Tags
from .validators import boolean
class Graph(AWSObject):
resource_type = "AWS::Detective::Graph"
props = {
"Tags": (Tags, False),
}
class MemberInvitation(AWSObject):
resource_type = "AWS::Detective::MemberInvitation"
props = {
"DisableEmailNotification": (boolean, False),
"GraphArn": (str, True),
"MemberEmailAddress": (str, True),
"MemberId": (str, True),
"Message": (str, False),
}
| Update Detective per 2021-04-29 changes | Update Detective per 2021-04-29 changes
| Python | bsd-2-clause | cloudtools/troposphere,cloudtools/troposphere | ---
+++
@@ -3,14 +3,16 @@
#
# See LICENSE file for full license.
-from . import AWSObject
+from . import AWSObject, Tags
from .validators import boolean
class Graph(AWSObject):
resource_type = "AWS::Detective::Graph"
- props = {}
+ props = {
+ "Tags": (Tags, False),
+ }
class MemberInvitation(AWSObject): |
7c0d68b1bce27d026b69e3a069c549ab560b0f3d | spillway/mixins.py | spillway/mixins.py | class FormMixin(object):
"""Mixin to provide form validation and data cleaning of GET or POST
requests.
"""
form_class = None
@property
def form(self):
"""Returns a validated form dict or an empty dict."""
_form = getattr(self, '_form', False)
if not _form:
self._form = self.form_class(self.request.GET or self.request.POST,
self.request.FILES or None)
valid = self._form.is_valid()
return self._form
class FormMixin(object):
    """Mixin that builds and validates a form from the incoming request.

    Subclasses set ``form_class``; the bound form is created lazily from
    the request's query parameters (or body data) plus any uploaded files,
    validated once, and cached on the instance.
    """

    form_class = None

    @property
    def form(self):
        """Return the cached bound form, instantiating and validating it once."""
        cached = getattr(self, '_form', None)
        if cached:
            return cached
        payload = self.request.QUERY_PARAMS or self.request.DATA
        bound = self.form_class(payload, self.request.FILES or None)
        bound.is_valid()
        self._form = bound
        return bound
| Use DRF query params and data request attrs | Use DRF query params and data request attrs
| Python | bsd-3-clause | barseghyanartur/django-spillway,kuzmich/django-spillway,bkg/django-spillway | ---
+++
@@ -9,7 +9,8 @@
"""Returns a validated form dict or an empty dict."""
_form = getattr(self, '_form', False)
if not _form:
- self._form = self.form_class(self.request.GET or self.request.POST,
- self.request.FILES or None)
+ self._form = self.form_class(
+ self.request.QUERY_PARAMS or self.request.DATA,
+ self.request.FILES or None)
valid = self._form.is_valid()
return self._form |
f62278c420429cfe9a3f2a8903f902ae24bdd95d | remoteappmanager/handlers/home_handler.py | remoteappmanager/handlers/home_handler.py | from tornado import gen, web
from remoteappmanager.handlers.base_handler import BaseHandler
class HomeHandler(BaseHandler):
"""Render the user's home page"""
@web.authenticated
@gen.coroutine
def get(self):
images_info = yield self._get_images_info()
self.render('home.html', images_info=images_info)
# private
@gen.coroutine
def _get_images_info(self):
"""Retrieves a dictionary containing the image and the associated
container, if active, as values."""
container_manager = self.application.container_manager
apps = self.application.db.get_apps_for_user(
self.current_user.account)
images_info = []
for mapping_id, app, policy in apps:
image = yield container_manager.image(app.image)
if image is None:
# The user has access to an application that is no longer
# available in docker. We just move on.
continue
containers = yield container_manager.containers_from_mapping_id(
self.current_user.name,
mapping_id)
# We assume that we can only run one container only (although the
# API considers a broader possibility for future extension.
container = None
if len(containers):
container = containers[0]
images_info.append({
"image": image,
"mapping_id": mapping_id,
"container": container
})
return images_info
| from tornado import gen, web
from remoteappmanager.handlers.base_handler import BaseHandler
class HomeHandler(BaseHandler):
    """Render the user's home page"""

    @web.authenticated
    @gen.coroutine
    def get(self):
        # The template is served as-is; the commit that produced this
        # version moved the image/container lookup out of this handler
        # (data is now fetched separately, via the REST API).
        self.render('home.html')
| Remove dead code now part of the REST API. | Remove dead code now part of the REST API.
| Python | bsd-3-clause | simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote,simphony/simphony-remote | ---
+++
@@ -9,43 +9,4 @@
@web.authenticated
@gen.coroutine
def get(self):
- images_info = yield self._get_images_info()
- self.render('home.html', images_info=images_info)
-
- # private
-
- @gen.coroutine
- def _get_images_info(self):
- """Retrieves a dictionary containing the image and the associated
- container, if active, as values."""
- container_manager = self.application.container_manager
-
- apps = self.application.db.get_apps_for_user(
- self.current_user.account)
-
- images_info = []
-
- for mapping_id, app, policy in apps:
- image = yield container_manager.image(app.image)
-
- if image is None:
- # The user has access to an application that is no longer
- # available in docker. We just move on.
- continue
-
- containers = yield container_manager.containers_from_mapping_id(
- self.current_user.name,
- mapping_id)
-
- # We assume that we can only run one container only (although the
- # API considers a broader possibility for future extension.
- container = None
- if len(containers):
- container = containers[0]
-
- images_info.append({
- "image": image,
- "mapping_id": mapping_id,
- "container": container
- })
- return images_info
+ self.render('home.html') |
20ecbcf76581caca255572e634883bc3746fe41f | src/metpy/__init__.py | src/metpy/__init__.py | # Copyright (c) 2015,2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Tools for reading, calculating, and plotting with weather data."""
# What do we want to pull into the top-level namespace?
import os
import sys
import warnings
if sys.version_info < (3,):
raise ImportError(
"""You are running MetPy 0.12.0 or greater on Python 2.
MetPy 0.12.0 and above are no longer compatible with Python 2, but this version was
still installed. Sorry about that; it should not have happened. Make sure you have
pip >= 9.0 to avoid this kind of issue, as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Your choices:
- Upgrade to Python 3.
- Install an older version of MetPy:
$ pip install 'metpy=0.11.1'
""")
# Must occur before below imports
warnings.filterwarnings('ignore', 'numpy.dtype size changed')
os.environ['PINT_ARRAY_PROTOCOL_FALLBACK'] = '0'
from ._version import get_version # noqa: E402
from .xarray import * # noqa: F401, F403
__version__ = get_version()
del get_version
| # Copyright (c) 2015,2019 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Tools for reading, calculating, and plotting with weather data."""
# What do we want to pull into the top-level namespace?
import os
import sys
import warnings
if sys.version_info < (3,):
raise ImportError(
"""You are running MetPy 0.12.0 or greater on Python 2.
MetPy 0.12.0 and above are no longer compatible with Python 2, but this version was
still installed. Sorry about that; it should not have happened. Make sure you have
pip >= 9.0 to avoid this kind of issue, as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Your choices:
- Upgrade to Python 3.
- Install an older version of MetPy:
$ pip install 'metpy=0.11.1'
""")
# Must occur before below imports
warnings.filterwarnings('ignore', 'numpy.dtype size changed')
os.environ['PINT_ARRAY_PROTOCOL_FALLBACK'] = '0'
from ._version import get_version # noqa: E402
from .xarray import * # noqa: F401, F403, E402
__version__ = get_version()
del get_version
| Fix new lint found by flake8 | MNT: Fix new lint found by flake8
| Python | bsd-3-clause | ShawnMurd/MetPy,Unidata/MetPy,dopplershift/MetPy,dopplershift/MetPy,Unidata/MetPy | ---
+++
@@ -32,6 +32,6 @@
os.environ['PINT_ARRAY_PROTOCOL_FALLBACK'] = '0'
from ._version import get_version # noqa: E402
-from .xarray import * # noqa: F401, F403
+from .xarray import * # noqa: F401, F403, E402
__version__ = get_version()
del get_version |
e18153ba918592bedb1ae363afc2b437db7576db | examples/livestream_datalogger.py | examples/livestream_datalogger.py | from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print "Received samples %d to %d from channel %d" % (idx, idx + len(d), ch)
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print e
except Exception as e:
print traceback.format_exc()
finally:
i.datalogger_stop()
m.close()
| from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print "Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch)
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print e
except Exception as e:
print traceback.format_exc()
finally:
i.datalogger_stop()
m.close()
| Fix trivial off by 1 in the livestream datalogger example | Datalogger: Fix trivial off by 1 in the livestream datalogger example
| Python | mit | benizl/pymoku,liquidinstruments/pymoku | ---
+++
@@ -24,7 +24,7 @@
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
- print "Received samples %d to %d from channel %d" % (idx, idx + len(d), ch)
+ print "Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch)
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print e |
916b86865acf0297293e4a13f1da6838f9b2711f | scripts/lib/errors.py | scripts/lib/errors.py | """ Оповещение администратора о возникших ошибках """
from traceback import format_exception, format_exc
from lib.config import emergency_id
from lib.commands import vk, api
class ErrorManager:
""" Упрощенное оповещение об ошибках
str name: название скрипта (обычно укороченное)
Использование: with ErrorManager(name): main()
"""
def __init__(self, name):
self.name = name
def __enter__(self):
pass
def __exit__(self, *args):
if args[0] is not None:
sendErrorMessage(self.name)
def sendErrorMessage(name, exception=None):
""" Использует либо полученную ошибку, либо ту, что возникла последней """
exception = format_error(exception)
message = "{}:\n{}".format(name, exception)
vk(api.messages.send, user_id=emergency_id, message=message)
def format_error(error):
if error is not None:
error_info = format_exception(type(error), error, error.__traceback__)
return "".join(error_info)
else:
return format_exc()
| """ Оповещение администратора о возникших ошибках """
from traceback import format_exception, format_exc
from contextlib import contextmanager
from lib.config import emergency_id
from lib.commands import vk, api
@contextmanager
def ErrorManager(name):
    """Context manager that reports any escaping exception to the admin.

    str name: short script name used to label the error report.
    Usage: ``with ErrorManager(name): main()``
    """
    try:
        yield
    except Exception:
        # Notify the administrator, then re-raise with the original
        # traceback intact (bare ``raise`` instead of ``raise e``, which
        # would append a redundant re-raise frame to the traceback).
        sendErrorMessage(name)
        raise
def sendErrorMessage(name, exception=None):
""" Использует либо полученную ошибку, либо ту, что возникла последней """
exception = format_error(exception)
message = "{}:\n{}".format(name, exception)
vk(api.messages.send, user_id=emergency_id, message=message)
def format_error(error):
    """Render *error* as a traceback string.

    When ``error`` is None, the most recently handled exception (if any)
    is formatted instead, mirroring ``traceback.format_exc()``.
    """
    if error is None:
        return format_exc()
    parts = format_exception(type(error), error, error.__traceback__)
    return "".join(parts)
| Change error class to function | Change error class to function
| Python | mit | Varabe/Guild-Manager | ---
+++
@@ -1,25 +1,23 @@
""" Оповещение администратора о возникших ошибках """
from traceback import format_exception, format_exc
+from contextlib import contextmanager
from lib.config import emergency_id
from lib.commands import vk, api
-class ErrorManager:
+@contextmanager
+def ErrorManager(name):
""" Упрощенное оповещение об ошибках
- str name: название скрипта (обычно укороченное)
- Использование: with ErrorManager(name): main()
+ str name: название скрипта (обычно укороченное)
+ Использование: with ErrorManager(name): main()
"""
- def __init__(self, name):
- self.name = name
-
- def __enter__(self):
- pass
-
- def __exit__(self, *args):
- if args[0] is not None:
- sendErrorMessage(self.name)
+ try:
+ yield
+ except Exception as e:
+ sendErrorMessage(name)
+ raise e
def sendErrorMessage(name, exception=None): |
146e35f48774173c2000b8a9790cdbe6925ba94a | opps/contrib/multisite/admin.py | opps/contrib/multisite/admin.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from .models import SitePermission
admin.site.register(SitePermission)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
from django.utils import timezone
from .models import SitePermission
class AdminViewPermission(admin.ModelAdmin):
    """ModelAdmin that restricts the changelist to objects belonging to the
    site the current user holds an active, published SitePermission for.
    """

    def queryset(self, request):
        # NOTE(review): ``queryset`` is the pre-Django-1.6 hook name; newer
        # Django versions use ``get_queryset`` -- confirm the target version.
        queryset = super(AdminViewPermission, self).queryset(request)
        try:
            sitepermission = SitePermission.objects.get(
                user=request.user,
                date_available__lte=timezone.now(),
                published=True)
            return queryset.filter(site_iid=sitepermission.site_iid)
        except SitePermission.DoesNotExist:
            # Users without an explicit permission fall through to the
            # unfiltered queryset.
            pass
        return queryset
admin.site.register(SitePermission)
| Create AdminViewPermission on contrib multisite | Create AdminViewPermission on contrib multisite
| Python | mit | opps/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps | ---
+++
@@ -1,7 +1,24 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.contrib import admin
+from django.utils import timezone
+
from .models import SitePermission
+class AdminViewPermission(admin.ModelAdmin):
+
+ def queryset(self, request):
+ queryset = super(AdminViewPermission, self).queryset(request)
+ try:
+ sitepermission = SitePermission.objects.get(
+ user=request.user,
+ date_available__lte=timezone.now(),
+ published=True)
+ return queryset.filter(site_iid=sitepermission.site_iid)
+ except SitePermission.DoesNotExist:
+ pass
+ return queryset
+
+
admin.site.register(SitePermission) |
7e25a0097c3a4e7d37d75f6e90bcee2883df0a46 | analysis/opening_plot.py | analysis/opening_plot.py | #!/usr/bin/python
import sys
def parse_opening_list(filename):
    """Summarize a list of chess openings (one per line in ``filename``).

    Writes two CSV files next to the input:
      - ``<filename>_top_opening_data.txt``: the 10 most common openings
        overall, plus one 0/1 indicator row per game.
      - ``<filename>_top_opening_rate_data.txt``: the 10 most common
        openings within the last ``movsum_window`` games, same row format.
    """
    with open(filename) as f:
        open_count = dict()
        openings = []
        for line in (raw.strip() for raw in f):
            open_count.setdefault(line, 0)
            open_count[line] += 1
            openings.append(line)

    # Top 10 openings over the whole history, most frequent first.
    top10 = list(reversed(sorted(open_count.keys(),
                                 key=lambda x: open_count[x])[-10:]))

    # Top 10 within a trailing window, to capture recent popularity.
    movsum_window = 1000
    last_window = openings[-movsum_window:]
    top10_rate = list(reversed(sorted(open_count.keys(),
                                      key=lambda x: last_window.count(x))[-10:]))

    for top_list, suffix in [(top10, '_top_opening_data.txt'),
                             (top10_rate, '_top_opening_rate_data.txt')]:
        with open(filename + suffix, 'w') as out:
            # Header row names the tracked openings. Bug fix: this used to
            # write ``top10`` for both files, so the rate file's header did
            # not match its own indicator columns.
            out.write(','.join(top_list) + '\n')
            for opening in openings:
                marker = ['1' if x == opening else '0' for x in top_list]
                out.write(','.join(marker) + '\n')
if __name__ == '__main__':
parse_opening_list(sys.argv[1])
| #!/usr/bin/python
import sys
def parse_opening_list(filename):
with open(filename) as f:
open_count = dict()
openings = []
for line in (raw.strip() for raw in f):
open_count.setdefault(line, 0)
open_count[line] += 1
openings.append(line)
top10 = list(reversed(sorted(open_count.keys(),
key=lambda x: open_count[x])[-10:]))
movsum_window = 1000
last_window = openings[-movsum_window:]
top10_rate = list(reversed(sorted(open_count.keys(),
key=lambda x : last_window.count(x))[-10:]))
for data in [[top10, '_top_opening_data.txt'], [top10_rate, '_top_opening_rate_data.txt']]:
with open(filename + data[1] , 'w') as out:
out.write(','.join(data[0]) + '\n')
for opening in openings:
marker = ['1' if x == opening else '0' for x in data[0]]
out.write(','.join(marker) + '\n')
if __name__ == '__main__':
parse_opening_list(sys.argv[1])
| Write correct titles for opening plots | Write correct titles for opening plots
| Python | mit | MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess,MarkZH/Genetic_Chess | ---
+++
@@ -21,7 +21,7 @@
for data in [[top10, '_top_opening_data.txt'], [top10_rate, '_top_opening_rate_data.txt']]:
with open(filename + data[1] , 'w') as out:
- out.write(','.join(top10) + '\n')
+ out.write(','.join(data[0]) + '\n')
for opening in openings:
marker = ['1' if x == opening else '0' for x in data[0]]
out.write(','.join(marker) + '\n') |
bc92988baee2186fe5b746751fb5d2e3ec6cb8d9 | statzlogger.py | statzlogger.py | import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class StatzHandler(logging.Handler):
def emit(self, record):
pass
class Collection(StatzHandler):
def __init__(self, level=logging.NOTSET):
StatzHandler.__init__(self, level)
self.indices = {}
def emit(self, record):
indices = getattr(record, "indices", [])
indices.append(getattr(record, "index", None))
for index in indices:
self.indices.setdefault(index, []).append(record)
class Sum(StatzHandler):
pass
class Top(StatzHandler):
pass
| import logging
try:
NullHandler = logging.NullHandler
except AttributeError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger("statzlogger")
log.addHandler(NullHandler())
class StatzHandler(logging.Handler):
def __init__(self, level=logging.NOTSET):
logging.Handler.__init__(self, level)
self.indices = {}
def emit(self, record):
pass
class Collection(StatzHandler):
    """Handler that groups emitted records by their index attribute(s)."""

    def emit(self, record):
        # Bug fix: copy before extending. ``getattr`` returns the record's
        # own ``indices`` list, and appending to it in place would mutate
        # the shared log record for every other handler that sees it.
        keys = list(getattr(record, "indices", []))
        keys.append(getattr(record, "index", None))
        for index in keys:
            self.indices.setdefault(index, []).append(record)
class Sum(StatzHandler):
pass
class Top(StatzHandler):
pass
| Index creation should apply across the board. | Index creation should apply across the board.
| Python | isc | whilp/statzlogger | ---
+++
@@ -12,14 +12,14 @@
class StatzHandler(logging.Handler):
+ def __init__(self, level=logging.NOTSET):
+ logging.Handler.__init__(self, level)
+ self.indices = {}
+
def emit(self, record):
pass
class Collection(StatzHandler):
-
- def __init__(self, level=logging.NOTSET):
- StatzHandler.__init__(self, level)
- self.indices = {}
def emit(self, record):
indices = getattr(record, "indices", []) |
dbe5c25e302b4b71603a94a9519e74605714284c | generic_links/migrations/0001_initial.py | generic_links/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='GenericLink',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('object_id', models.PositiveIntegerField(db_index=True)),
('url', models.URLField()),
('title', models.CharField(max_length=200)),
('description', models.TextField(max_length=1000, null=True, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True, db_index=True)),
('is_external', models.BooleanField(default=True, db_index=True)),
('content_type', models.ForeignKey(to='contenttypes.ContentType')),
('user', models.ForeignKey(blank=True, to=settings.AUTH_USER_MODEL, null=True)),
],
options={
'ordering': ('-created_at',),
'verbose_name': 'Generic Link',
'verbose_name_plural': 'Generic Links',
},
),
]
| Remove migration dependency from Django 1.8 | Remove migration dependency from Django 1.8 | Python | bsd-3-clause | matagus/django-generic-links,matagus/django-generic-links | ---
+++
@@ -8,7 +8,6 @@
class Migration(migrations.Migration):
dependencies = [
- ('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
|
a3eb818fb9201d5fdf520ce87c9da1d11e1c7e75 | denim/constants.py | denim/constants.py | # -*- encoding:utf8 -*-
from fabric.api import env
class RootUser(object):
"""
Class to define Root user.
"""
uid=0
@classmethod
def sudo_identity(cls):
return None
class DeployUser(object):
"""
Class to define Deploy User.
"""
@classmethod
def sudo_identity(cls):
return env.deploy_user
| # -*- encoding:utf8 -*-
from fabric.api import env
class UserBase(object):
pass
class RootUser(UserBase):
"""
Class to define Root user.
"""
uid=0
@classmethod
def sudo_identity(cls):
return None
class DeployUser(UserBase):
"""
Class to define Deploy User.
"""
@classmethod
def sudo_identity(cls):
return env.deploy_user
| Fix issue running database creation, should be run as the deployment user | Fix issue running database creation, should be run as the deployment user
| Python | bsd-2-clause | timsavage/denim | ---
+++
@@ -2,7 +2,11 @@
from fabric.api import env
-class RootUser(object):
+class UserBase(object):
+ pass
+
+
+class RootUser(UserBase):
"""
Class to define Root user.
"""
@@ -13,7 +17,7 @@
return None
-class DeployUser(object):
+class DeployUser(UserBase):
"""
Class to define Deploy User.
""" |
9bdfdc860264aa200d74bcaf813c2f5055307a3b | webapp/tests/__init__.py | webapp/tests/__init__.py | # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
db.app = self.app
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
| # -*- coding: utf-8 -*-
from unittest import TestCase
from byceps.application import create_app
from byceps.blueprints.brand.models import Brand
from byceps.blueprints.party.models import Party
from byceps.database import db
class AbstractAppTestCase(TestCase):
def setUp(self):
self.app = create_app('test', initialize=False)
self.db = db
db.app = self.app
db.create_all()
self.create_brand_and_party()
self.client = self.app.test_client()
def create_brand_and_party(self):
brand = Brand(id='acme', title='ACME')
db.session.add(brand)
party = Party(id='acme-2014', brand=brand, title='ACME 2014')
db.session.add(party)
db.session.commit()
def tearDown(self):
db.session.remove()
db.drop_all()
| Prepare application test with current brand and party. | Prepare application test with current brand and party.
| Python | bsd-3-clause | homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps | ---
+++
@@ -3,6 +3,8 @@
from unittest import TestCase
from byceps.application import create_app
+from byceps.blueprints.brand.models import Brand
+from byceps.blueprints.party.models import Party
from byceps.database import db
@@ -11,8 +13,20 @@
def setUp(self):
self.app = create_app('test', initialize=False)
+ self.db = db
db.app = self.app
db.create_all()
+
+ self.create_brand_and_party()
+
+ self.client = self.app.test_client()
+
+ def create_brand_and_party(self):
+ brand = Brand(id='acme', title='ACME')
+ db.session.add(brand)
+ party = Party(id='acme-2014', brand=brand, title='ACME 2014')
+ db.session.add(party)
+ db.session.commit()
def tearDown(self):
db.session.remove() |
af3a124c8608fc516a0b78b25da0d4c96aef68da | avahi-daemon/dbus-test.py | avahi-daemon/dbus-test.py | #!/usr/bin/python2.4
import dbus
import dbus.glib
import gtk
from time import sleep
bus = dbus.SystemBus()
server = dbus.Interface(bus.get_object("org.freedesktop.Avahi", '/org/freedesktop/Avahi/Server'), 'org.freedesktop.Avahi.Server')
print "Host name: %s" % server.GetHostName()
print "Domain name: %s" % server.GetDomainName()
print "FQDN: %s" % server.GetHostNameFqdn()
g = dbus.Interface(bus.get_object("org.freedesktop.Avahi", server.EntryGroupNew()), 'org.freedesktop.Avahi.EntryGroup')
def state_changed_callback(t):
print "StateChanged: ", t
g.connect_to_signal('StateChanged', state_changed_callback)
g.AddService(0, 0, "_http._tcp", "foo", "", "", dbus.UInt16(4712), ["fuck=hallo", "gurke=mega"])
g.AddAddress(0, 0, "foo.local", "47.11.8.15")
g.Commit()
try:
gtk.main()
except KeyboardInterrupt, k:
pass
g.Free()
print "Quit"
| #!/usr/bin/python2.4
import dbus
import dbus.glib
import gtk
from time import sleep
bus = dbus.SystemBus()
server = dbus.Interface(bus.get_object("org.freedesktop.Avahi", '/org/freedesktop/Avahi/Server'), 'org.freedesktop.Avahi.Server')
def server_state_changed_callback(t):
print "Server::StateChanged: ", t
server.connect_to_signal("StateChanged", server_state_changed_callback)
print "Host name: %s" % server.GetHostName()
print "Domain name: %s" % server.GetDomainName()
print "FQDN: %s" % server.GetHostNameFqdn()
g = dbus.Interface(bus.get_object("org.freedesktop.Avahi", server.EntryGroupNew()), 'org.freedesktop.Avahi.EntryGroup')
def entry_group_state_changed_callback(t):
print "EntryGroup::StateChanged: ", t
g.connect_to_signal('StateChanged', entry_group_state_changed_callback)
g.AddService(0, 0, "_http._tcp", "foo", "", "", dbus.UInt16(4712), ["fuck=hallo", "gurke=mega"])
g.AddAddress(0, 0, "foo.local", "47.11.8.15")
g.Commit()
try:
gtk.main()
except KeyboardInterrupt, k:
pass
g.Free()
print "Quit"
| Make use of StateChanged signal of DBUS Server object | Make use of StateChanged signal of DBUS Server object
git-svn-id: ff687e355030673c307e7da231f59639d58f56d5@172 941a03a8-eaeb-0310-b9a0-b1bbd8fe43fe
| Python | lgpl-2.1 | Distrotech/avahi,Kisensum/xmDNS-avahi,Kisensum/xmDNS-avahi,heftig/avahi,catta-x/catta,lathiat/avahi,catta-x/catta,lathiat/avahi,heftig/avahi,sunilghai/avahi-clone,sunilghai/avahi-clone,lathiat/avahi,heftig/avahi-1,catta-x/catta,Distrotech/avahi,Distrotech/avahi,heftig/avahi,heftig/avahi-1,heftig/avahi-1,Kisensum/xmDNS-avahi,everbase/catta,lathiat/avahi,heftig/avahi-1,gloryleague/avahi,sunilghai/avahi-clone,lathiat/avahi,Kisensum/xmDNS-avahi,gloryleague/avahi,heftig/avahi-1,gloryleague/avahi,gloryleague/avahi,Kisensum/xmDNS-avahi,everbase/catta,heftig/avahi,Kisensum/xmDNS-avahi,heftig/avahi-1,Distrotech/avahi,gloryleague/avahi,everbase/catta,Distrotech/avahi,sunilghai/avahi-clone,gloryleague/avahi,lathiat/avahi,heftig/avahi,sunilghai/avahi-clone,heftig/avahi,Distrotech/avahi,sunilghai/avahi-clone | ---
+++
@@ -10,16 +10,22 @@
server = dbus.Interface(bus.get_object("org.freedesktop.Avahi", '/org/freedesktop/Avahi/Server'), 'org.freedesktop.Avahi.Server')
+def server_state_changed_callback(t):
+ print "Server::StateChanged: ", t
+
+server.connect_to_signal("StateChanged", server_state_changed_callback)
+
print "Host name: %s" % server.GetHostName()
print "Domain name: %s" % server.GetDomainName()
print "FQDN: %s" % server.GetHostNameFqdn()
g = dbus.Interface(bus.get_object("org.freedesktop.Avahi", server.EntryGroupNew()), 'org.freedesktop.Avahi.EntryGroup')
-def state_changed_callback(t):
- print "StateChanged: ", t
+def entry_group_state_changed_callback(t):
+ print "EntryGroup::StateChanged: ", t
-g.connect_to_signal('StateChanged', state_changed_callback)
+g.connect_to_signal('StateChanged', entry_group_state_changed_callback)
+
g.AddService(0, 0, "_http._tcp", "foo", "", "", dbus.UInt16(4712), ["fuck=hallo", "gurke=mega"])
g.AddAddress(0, 0, "foo.local", "47.11.8.15")
g.Commit() |
50488976619795621b5eb6dd3e427f6f82188426 | peanut/template.py | peanut/template.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Template"""
import peanut
import jinja2
from os import path
from jinja2 import FileSystemLoader
from jinja2.exceptions import TemplateNotFound
class SmartLoader(FileSystemLoader):
"""A smart template loader"""
available_extension = ['.html', '.xml']
def get_source(self, environment, template):
if template is None:
raise TemplateNotFound(template)
if '.' in template:
return super(SmartLoader, self).get_source(environment, template)
for extension in SmartLoader.available_extension:
try:
filename = template + extension
return super(SmartLoader, self).get_source(environment, filename)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
class Template(object):
"""Template"""
def __init__(self, path, filters=None, **kwargs):
loader = SmartLoader(path)
self.env = jinja2.Environment(
loader=loader,
)
# Update filters
if isinstance(filters, dict):
self.env.filters.update(filters)
# Update global namesapce
self.env.globals.update(kwargs)
def render(self, name, **context):
"""Render template with name and context"""
template = self.env.get_template(name)
return template.render(**context)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Template"""
import peanut
import jinja2
from os import path
from jinja2 import FileSystemLoader
from jinja2.exceptions import TemplateNotFound
class SmartLoader(FileSystemLoader):
"""A smart template loader"""
available_extension = ['.html', '.xml']
def get_source(self, environment, template):
if template is None:
raise TemplateNotFound(template)
if '.' in template:
return super(SmartLoader, self).get_source(environment, template)
for extension in SmartLoader.available_extension:
try:
filename = template + extension
return super(SmartLoader, self).get_source(environment, filename)
except TemplateNotFound:
pass
raise TemplateNotFound(template)
class Template(object):
"""Template"""
def __init__(self, path, filters=None, **kwargs):
loader = SmartLoader(path)
self.env = jinja2.Environment(
loader=loader,
)
# Update filters
if isinstance(filters, dict):
self.env.filters.update(filters)
# Update global namesapce
self.env.globals.update(kwargs)
def update_context(self, **kwargs):
"""Update global context
"""
self.env.globals.update(kwargs)
def render(self, name, **context):
"""Render template with name and context"""
template = self.env.get_template(name)
return template.render(**context)
| Add an interface to update global context | Add an interface to update global context
| Python | mit | zqqf16/Peanut,zqqf16/Peanut,zqqf16/Peanut | ---
+++
@@ -45,6 +45,11 @@
# Update global namesapce
self.env.globals.update(kwargs)
+ def update_context(self, **kwargs):
+ """Update global context
+ """
+ self.env.globals.update(kwargs)
+
def render(self, name, **context):
"""Render template with name and context"""
|
a5387c85a898717a5ae13dafe6f0f2b19f44e749 | apps/vacancies/tasks.py | apps/vacancies/tasks.py | # coding: utf-8
import urllib
from celery import shared_task
from apps.vacancies.parsers import VacancySync, YandexRabotaParser
@shared_task(ignore_result=True)
def update_vacancies():
fulltime = {
'rid': 213,
'currency': 'RUR',
'text': 'python программист',
'strict': 'false',
'employment': 'FULL_EMPLOYMENT'
}
part_time = fulltime.copy()
part_time['employment'] = 'TEMPORARY_EMPLOYMENT'
syncer = VacancySync(parsers=[
YandexRabotaParser(urllib.parse.urlencode(fulltime), type='fulltime'),
YandexRabotaParser(urllib.parse.urlencode(part_time), type='contract')
])
return syncer.sync()
| # coding: utf-8
import urllib
from celery import shared_task
from apps.vacancies.parsers import VacancySync, YandexRabotaParser
@shared_task(ignore_result=True)
def update_vacancies():
fulltime = {
'rid': 213,
'currency': 'RUR',
'text': 'python программист',
'strict': 'false',
'employment': 'FULL_EMPLOYMENT'
}
part_time = fulltime.copy()
part_time['employment'] = 'TEMPORARY_EMPLOYMENT'
syncer = VacancySync(parsers=[
YandexRabotaParser(urllib.parse.urlencode(fulltime), type='fulltime'),
# Contract feed is broken as of 13 july (shows same data as fulltime)
# YandexRabotaParser(urllib.parse.urlencode(part_time), type='contract')
])
return syncer.sync()
| Disable contracts in jobs parser | Disable contracts in jobs parser
| Python | bsd-3-clause | moscowdjango/moscowdjango,moscowdjango/moscowdjango,moscowdjango/moscowdjango,VladimirFilonov/moscowdjango,moscowpython/moscowpython,VladimirFilonov/moscowdjango,moscowpython/moscowpython,VladimirFilonov/moscowdjango,moscowpython/moscowpython | ---
+++
@@ -18,6 +18,7 @@
syncer = VacancySync(parsers=[
YandexRabotaParser(urllib.parse.urlencode(fulltime), type='fulltime'),
- YandexRabotaParser(urllib.parse.urlencode(part_time), type='contract')
+ # Contract feed is broken as of 13 july (shows same data as fulltime)
+ # YandexRabotaParser(urllib.parse.urlencode(part_time), type='contract')
])
return syncer.sync() |
0281aaa0868d0bfa6ecb7368cff89b4af6b57129 | tests/functions_tests/test_dropout.py | tests/functions_tests/test_dropout.py | import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
| import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import testing
from chainer.testing import attr
if cuda.available:
cuda.init()
class TestDropout(unittest.TestCase):
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (2, 3)).astype(numpy.float32)
def check_type_forward(self, x_data):
x = chainer.Variable(x_data)
try:
functions.dropout(x)
except Exception:
self.fail()
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
@attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
testing.run_module(__name__, __file__)
| Add attr.gpu decorator to gpu test of dropout | Add attr.gpu decorator to gpu test of dropout
| Python | mit | yanweifu/chainer,hvy/chainer,cupy/cupy,ysekky/chainer,woodshop/complex-chainer,niboshi/chainer,tkerola/chainer,kashif/chainer,kikusu/chainer,jnishi/chainer,okuta/chainer,niboshi/chainer,benob/chainer,chainer/chainer,AlpacaDB/chainer,sou81821/chainer,umitanuki/chainer,tscohen/chainer,cupy/cupy,laysakura/chainer,masia02/chainer,jfsantos/chainer,anaruse/chainer,keisuke-umezawa/chainer,truongdq/chainer,chainer/chainer,wkentaro/chainer,ktnyt/chainer,aonotas/chainer,ikasumi/chainer,kikusu/chainer,AlpacaDB/chainer,sinhrks/chainer,sinhrks/chainer,rezoo/chainer,okuta/chainer,jnishi/chainer,1986ks/chainer,muupan/chainer,ytoyama/yans_chainer_hackathon,minhpqn/chainer,wavelets/chainer,muupan/chainer,ktnyt/chainer,cemoody/chainer,ktnyt/chainer,cupy/cupy,kuwa32/chainer,ktnyt/chainer,hvy/chainer,jnishi/chainer,niboshi/chainer,Kaisuke5/chainer,tigerneil/chainer,ronekko/chainer,cupy/cupy,truongdq/chainer,pfnet/chainer,t-abe/chainer,hvy/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,t-abe/chainer,keisuke-umezawa/chainer,woodshop/chainer,jnishi/chainer,wkentaro/chainer,hidenori-t/chainer,elviswf/chainer,chainer/chainer,keisuke-umezawa/chainer,wkentaro/chainer,hvy/chainer,benob/chainer,delta2323/chainer,kiyukuta/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer | ---
+++
@@ -6,6 +6,7 @@
from chainer import cuda
from chainer import functions
from chainer import testing
+from chainer.testing import attr
if cuda.available:
@@ -27,6 +28,7 @@
def test_type_forward_cpu(self):
self.check_type_forward(self.x)
+ @attr.gpu
def test_type_forward_gpu(self):
self.check_type_forward(cuda.to_gpu(self.x))
|
45ba5e046a7ab76d54422a41604c7a90794cfd3f | app/handlers/__init__.py | app/handlers/__init__.py | __version__ = "2019.11.1"
__versionfull__ = __version__
| __version__ = "2019.12.0"
__versionfull__ = __version__
| Bump app version to 2019.12.0 | Bump app version to 2019.12.0
Signed-off-by: Guillaume Tucker <e7e20b7c30f19d4ca6b81319ef81bf200369d137@collabora.com>
| Python | lgpl-2.1 | kernelci/kernelci-backend,kernelci/kernelci-backend | ---
+++
@@ -1,2 +1,2 @@
-__version__ = "2019.11.1"
+__version__ = "2019.12.0"
__versionfull__ = __version__ |
f9c1393b9773a5df993a98b877ce5178d44c8575 | common.py | common.py | import os, os.path
base_dir = os.getcwd()
script_dir = os.path.realpath(os.path.dirname(__file__))
def get_crawl_dir():
return os.path.join(base_dir, "crawl")
def revision_present(version, revision):
return os.path.isdir(os.path.join(get_crawl_dir(), version, revision))
| import os, os.path
script_dir = os.path.realpath(os.path.dirname(__file__))
base_dir = script_dir
def get_crawl_dir():
return os.path.join(base_dir, "crawl")
def revision_present(version, revision):
return os.path.isdir(os.path.join(get_crawl_dir(), version, revision))
| Use the script dir as base_dir for now. | Use the script dir as base_dir for now.
| Python | mit | flodiebold/crawl-versions | ---
+++
@@ -1,7 +1,7 @@
import os, os.path
-base_dir = os.getcwd()
script_dir = os.path.realpath(os.path.dirname(__file__))
+base_dir = script_dir
def get_crawl_dir():
return os.path.join(base_dir, "crawl") |
9b4e803f68b33f193f0a41784e2f51672d69c4c2 | benchmarks/numpy-bench.py | benchmarks/numpy-bench.py | # Copyright 2019 Google LLC
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
import time
import numpy as np
import sys
n = int(sys.argv[1])
x = np.random.randn(n,n)
y = np.random.randn(n,n)
t0 = time.time()
z = np.dot(x, y)
print "Time for {} by {} matmul: {} s".format(n,n, time.time() - t0)
| # Copyright 2019 Google LLC
#
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file or at
# https://developers.google.com/open-source/licenses/bsd
import time
import numpy as np
import sys
from __future__ import print_function
n = int(sys.argv[1])
x = np.random.randn(n,n)
y = np.random.randn(n,n)
t0 = time.time()
z = np.dot(x, y)
print("Time for {} by {} matmul: {} s".format(n,n, time.time() - t0))
| Make bechmark compatible with python3 | Make bechmark compatible with python3
| Python | bsd-3-clause | google-research/dex-lang,google-research/dex-lang,google-research/dex-lang,google-research/dex-lang,google-research/dex-lang,google-research/dex-lang | ---
+++
@@ -7,6 +7,7 @@
import time
import numpy as np
import sys
+from __future__ import print_function
n = int(sys.argv[1])
@@ -15,4 +16,4 @@
t0 = time.time()
z = np.dot(x, y)
-print "Time for {} by {} matmul: {} s".format(n,n, time.time() - t0)
+print("Time for {} by {} matmul: {} s".format(n,n, time.time() - t0)) |
5ef45edb38ed5475351d484b89f0e99e5d50ea92 | examples/test-archive.py | examples/test-archive.py | from __future__ import print_function
import unittest
import modfoo
import saliweb.test
import os
class JobTests(saliweb.test.TestCase):
"""Check custom ModFoo Job class"""
def test_archive(self):
"""Test the archive method"""
# Make a ModFoo Job test job in ARCHIVED state
j = self.make_test_job(modfoo.Job, 'ARCHIVED')
# Run the rest of this testcase in the job's directory
d = saliweb.test.RunInDir(j.directory)
# Make a test PDB file and another incidental file
with open('test.pdb', 'w') as f:
print("test pdb", file=f)
with open('test.txt', 'w') as f:
print("text file", file=f)
# Run the job's "archive" method
j.archive()
# Job's archive method should have gzipped every PDB file but not
# anything else
self.assertTrue(os.path.exists('test.pdb.gz'))
self.assertFalse(os.path.exists('test.pdb'))
self.assertTrue(os.path.exists('test.txt'))
if __name__ == '__main__':
unittest.main()
| from __future__ import print_function
import unittest
import modfoo
import saliweb.test
import os
class JobTests(saliweb.test.TestCase):
"""Check custom ModFoo Job class"""
def test_archive(self):
"""Test the archive method"""
# Make a ModFoo Job test job in ARCHIVED state
j = self.make_test_job(modfoo.Job, 'ARCHIVED')
# Run the rest of this testcase in the job's directory
with saliweb.test.working_directory(j.directory):
# Make a test PDB file and another incidental file
with open('test.pdb', 'w') as f:
print("test pdb", file=f)
with open('test.txt', 'w') as f:
print("text file", file=f)
# Run the job's "archive" method
j.archive()
# Job's archive method should have gzipped every PDB file but not
# anything else
self.assertTrue(os.path.exists('test.pdb.gz'))
self.assertFalse(os.path.exists('test.pdb'))
self.assertTrue(os.path.exists('test.txt'))
if __name__ == '__main__':
unittest.main()
| Use context manager in example | Use context manager in example
| Python | lgpl-2.1 | salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb,salilab/saliweb | ---
+++
@@ -3,6 +3,7 @@
import modfoo
import saliweb.test
import os
+
class JobTests(saliweb.test.TestCase):
"""Check custom ModFoo Job class"""
@@ -12,21 +13,22 @@
# Make a ModFoo Job test job in ARCHIVED state
j = self.make_test_job(modfoo.Job, 'ARCHIVED')
# Run the rest of this testcase in the job's directory
- d = saliweb.test.RunInDir(j.directory)
- # Make a test PDB file and another incidental file
- with open('test.pdb', 'w') as f:
- print("test pdb", file=f)
- with open('test.txt', 'w') as f:
- print("text file", file=f)
+ with saliweb.test.working_directory(j.directory):
+ # Make a test PDB file and another incidental file
+ with open('test.pdb', 'w') as f:
+ print("test pdb", file=f)
+ with open('test.txt', 'w') as f:
+ print("text file", file=f)
- # Run the job's "archive" method
- j.archive()
+ # Run the job's "archive" method
+ j.archive()
- # Job's archive method should have gzipped every PDB file but not
- # anything else
- self.assertTrue(os.path.exists('test.pdb.gz'))
- self.assertFalse(os.path.exists('test.pdb'))
- self.assertTrue(os.path.exists('test.txt'))
+ # Job's archive method should have gzipped every PDB file but not
+ # anything else
+ self.assertTrue(os.path.exists('test.pdb.gz'))
+ self.assertFalse(os.path.exists('test.pdb'))
+ self.assertTrue(os.path.exists('test.txt'))
+
if __name__ == '__main__':
unittest.main() |
169df99132c9f4d0d44a9207184e53537d0688ec | tappy_tests.py | tappy_tests.py | # -*- coding: utf-8 -*-
"""
Tappy Terror Tests
------------------
Tests Tappy Terror
:license: MIT; details in LICENSE
"""
import tappy
import unittest
class TappyTerrorTestCase(unittest.TestCase):
def setUp(self):
tappy.app.config['TESTING'] = True
self.app = tappy.app.test_client()
def test_index_renders(self):
result = self.app.get('/')
self.assertEqual(result.status_code, 200)
if __name__ == '__main__':
unittest.main()
| # -*- coding: utf-8 -*-
"""
Tappy Terror Tests
------------------
Tests Tappy Terror
:license: MIT; details in LICENSE
"""
import tappy
import unittest
class TappyTerrorWebTestCase(unittest.TestCase):
def setUp(self):
tappy.app.config['TESTING'] = True
self.app = tappy.app.test_client()
def test_index_renders(self):
result = self.app.get('/')
self.assertEqual(result.status_code, 200)
if __name__ == '__main__':
unittest.main()
| Rename test case to be clearly for the web-facing parts of Tappy Terror | Rename test case to be clearly for the web-facing parts of Tappy Terror
| Python | mit | jculpon/tappy-flask,jculpon/tappy-flask | ---
+++
@@ -11,7 +11,7 @@
import tappy
import unittest
-class TappyTerrorTestCase(unittest.TestCase):
+class TappyTerrorWebTestCase(unittest.TestCase):
def setUp(self):
tappy.app.config['TESTING'] = True
self.app = tappy.app.test_client() |
44cec721b7a0a20059c143124a889f8f7a1fe615 | config.py | config.py | ###
# Copyright (c) 2012, spline
# All rights reserved.
#
#
###
import os
import supybot.conf as conf
import supybot.registry as registry
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('MLB')
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('MLB', True)
MLB = conf.registerPlugin('MLB')
# This is where your configuration variables (if any) should go. For example:
conf.registerGlobalValue(MLB, 'dbLocation', registry.String(os.path.abspath(os.path.dirname(__file__)) + '/mlb.db', _("""Absolute path for mlb.db sqlite3 database file location.""")))
conf.registerGlobalValue(MLB, 'ffApiKey', registry.String('', """api key for fanfeedr.com""", private=True))
conf.registerGlobalValue(MLB, 'usatApiKey', registry.String('', """api key for developer.usatoday.com""", private=True))
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=250:
| ###
# Copyright (c) 2012-2013, spline
# All rights reserved.
#
#
###
import os
import supybot.conf as conf
import supybot.registry as registry
from supybot.i18n import PluginInternationalization, internationalizeDocstring
_ = PluginInternationalization('MLB')
def configure(advanced):
# This will be called by supybot to configure this module. advanced is
# a bool that specifies whether the user identified himself as an advanced
# user or not. You should effect your configuration by manipulating the
# registry as appropriate.
from supybot.questions import expect, anything, something, yn
conf.registerPlugin('MLB', True)
MLB = conf.registerPlugin('MLB')
conf.registerGlobalValue(MLB, 'usatApiKey', registry.String('', """api key for developer.usatoday.com""", private=True))
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=250:
| Remove the dbLocation as we do this within plugin.py | Remove the dbLocation as we do this within plugin.py
| Python | mit | reticulatingspline/MLB | ---
+++
@@ -1,5 +1,5 @@
###
-# Copyright (c) 2012, spline
+# Copyright (c) 2012-2013, spline
# All rights reserved.
#
#
@@ -23,9 +23,6 @@
MLB = conf.registerPlugin('MLB')
-# This is where your configuration variables (if any) should go. For example:
-conf.registerGlobalValue(MLB, 'dbLocation', registry.String(os.path.abspath(os.path.dirname(__file__)) + '/mlb.db', _("""Absolute path for mlb.db sqlite3 database file location.""")))
-conf.registerGlobalValue(MLB, 'ffApiKey', registry.String('', """api key for fanfeedr.com""", private=True))
conf.registerGlobalValue(MLB, 'usatApiKey', registry.String('', """api key for developer.usatoday.com""", private=True))
# vim:set shiftwidth=4 tabstop=4 expandtab textwidth=250: |
e71caded81851c585dcfd2326f8aa4342d9fbd8b | docs/conf.py | docs/conf.py | AUTHOR = u'Adrian Sampson'
# General configuration
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks']
exclude_patterns = ['_build']
source_suffix = '.rst'
master_doc = 'index'
project = u'beets'
copyright = u'2012, Adrian Sampson'
version = '1.3'
release = '1.3.11'
pygments_style = 'sphinx'
# External links to the bug tracker.
extlinks = {
'bug': ('https://github.com/sampsyo/beets/issues/%s', '#'),
'user': ('https://github.com/%s', ''),
}
# Options for HTML output
html_theme = 'default'
htmlhelp_basename = 'beetsdoc'
# Options for LaTeX output
latex_documents = [
('index', 'beets.tex', u'beets Documentation',
AUTHOR, 'manual'),
]
# Options for manual page output
man_pages = [
('reference/cli', 'beet', u'music tagger and library organizer',
[AUTHOR], 1),
('reference/config', 'beetsconfig', u'beets configuration file',
[AUTHOR], 5),
]
| AUTHOR = u'Adrian Sampson'
# General configuration
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.extlinks']
exclude_patterns = ['_build']
source_suffix = '.rst'
master_doc = 'index'
project = u'beets'
copyright = u'2012, Adrian Sampson'
version = '1.3'
release = '1.3.11'
pygments_style = 'sphinx'
# External links to the bug tracker.
extlinks = {
'bug': ('https://github.com/sampsyo/beets/issues/%s', '#'),
'user': ('https://github.com/%s', ''),
}
# Options for HTML output
html_theme = 'classic'
htmlhelp_basename = 'beetsdoc'
# Options for LaTeX output
latex_documents = [
('index', 'beets.tex', u'beets Documentation',
AUTHOR, 'manual'),
]
# Options for manual page output
man_pages = [
('reference/cli', 'beet', u'music tagger and library organizer',
[AUTHOR], 1),
('reference/config', 'beetsconfig', u'beets configuration file',
[AUTHOR], 5),
]
| Update docs' html_theme value: default → classic | Update docs' html_theme value: default → classic
New proposed default is 'alabaster', which looks nice but leaves less
room to the core content. 'classic' replaces 'default'.
Anyway readthedocs.org applies its own theme so this only impacts local
builds.
| Python | mit | mosesfistos1/beetbox,MyTunesFreeMusic/privacy-policy,PierreRust/beets,multikatt/beets,shamangeorge/beets,mathstuf/beets,diego-plan9/beets,ruippeixotog/beets,mathstuf/beets,parapente/beets,gabrielaraujof/beets,YetAnotherNerd/beets,shamangeorge/beets,kareemallen/beets,swt30/beets,marcuskrahl/beets,Dishwishy/beets,sadatay/beets,randybias/beets,lengtche/beets,madmouser1/beets,swt30/beets,MyTunesFreeMusic/privacy-policy,asteven/beets,pkess/beets,swt30/beets,mried/beets,sampsyo/beets,imsparsh/beets,Freso/beets,ruippeixotog/beets,untitaker/beets,arabenjamin/beets,ruippeixotog/beets,imsparsh/beets,YetAnotherNerd/beets,drm00/beets,Freso/beets,lightwang1/beets,parapente/beets,drm00/beets,mosesfistos1/beetbox,ibmibmibm/beets,untitaker/beets,SusannaMaria/beets,Andypsamp/CODjunit,Andypsamp/CODjunit,beetbox/beets,lightwang1/beets,gabrielaraujof/beets,randybias/beets,madmouser1/beets,jcoady9/beets,Andypsamp/CODjunit,ttsda/beets,Andypsamp/CODfinalJUNIT,kelvinhammond/beets,m-urban/beets,parapente/beets,Dishwishy/beets,mathstuf/beets,mried/beets,dfc/beets,moodboom/beets,Freso/beets,LordSputnik/beets,asteven/beets,moodboom/beets,PierreRust/beets,YetAnotherNerd/beets,dfc/beets,LordSputnik/beets,swt30/beets,jackwilsdon/beets,Andypsamp/CODfinalJUNIT,kareemallen/beets,pkess/beets,drm00/beets,gabrielaraujof/beets,MyTunesFreeMusic/privacy-policy,shanemikel/beets,xsteadfastx/beets,Kraymer/beets,artemutin/beets,multikatt/beets,xsteadfastx/beets,Dishwishy/beets,sadatay/beets,LordSputnik/beets,LordSputnik/beets,lengtche/beets,tima/beets,sampsyo/beets,pkess/beets,tima/beets,dfc/beets,sadatay/beets,kareemallen/beets,Andypsamp/CODfinalJUNIT,tima/beets,ibmibmibm/beets,jmwatte/beets,Freso/beets,dfc/beets,untitaker/beets,jackwilsdon/beets,lengtche/beets,Dishwishy/beets,xsteadfastx/beets,ttsda/beets,shanemikel/beets,m-urban/beets,jmwatte/beets,asteven/beets,Andypsamp/CODfinalJUNIT,YetAnotherNerd/beets,shanemikel/beets,sampsyo/beets,gabrielaraujof/beets,parapente/beets,madmouser1/beets
,Andypsamp/CODjunit,jackwilsdon/beets,arabenjamin/beets,ttsda/beets,asteven/beets,SusannaMaria/beets,ruippeixotog/beets,jcoady9/beets,mried/beets,ttsda/beets,pkess/beets,Kraymer/beets,PierreRust/beets,m-urban/beets,diego-plan9/beets,SusannaMaria/beets,beetbox/beets,beetbox/beets,diego-plan9/beets,artemutin/beets,imsparsh/beets,moodboom/beets,kelvinhammond/beets,artemutin/beets,randybias/beets,mathstuf/beets,ibmibmibm/beets,kareemallen/beets,multikatt/beets,shamangeorge/beets,marcuskrahl/beets,jcoady9/beets,kelvinhammond/beets,lightwang1/beets,imsparsh/beets,PierreRust/beets,sampsyo/beets,lengtche/beets,Kraymer/beets,shanemikel/beets,shamangeorge/beets,mried/beets,diego-plan9/beets,m-urban/beets,arabenjamin/beets,Andypsamp/CODjunit,arabenjamin/beets,beetbox/beets,randybias/beets,sadatay/beets,jackwilsdon/beets,madmouser1/beets,xsteadfastx/beets,mosesfistos1/beetbox,multikatt/beets,drm00/beets,marcuskrahl/beets,SusannaMaria/beets,jcoady9/beets,artemutin/beets,tima/beets,kelvinhammond/beets,moodboom/beets,jmwatte/beets,jmwatte/beets,marcuskrahl/beets,untitaker/beets,MyTunesFreeMusic/privacy-policy,Andypsamp/CODfinalJUNIT,Kraymer/beets,mosesfistos1/beetbox,ibmibmibm/beets,lightwang1/beets | ---
+++
@@ -23,7 +23,7 @@
}
# Options for HTML output
-html_theme = 'default'
+html_theme = 'classic'
htmlhelp_basename = 'beetsdoc'
# Options for LaTeX output |
bc05c56c60fa61f045079a4b3ef2dea185b213b4 | fortuitus/fcore/tests.py | fortuitus/fcore/tests.py | from django.core.urlresolvers import reverse
from django.test import TestCase
from fortuitus.fcore.factories import UserF
from fortuitus.fcore.models import FortuitusProfile
class HomeViewTestCase(TestCase):
def test_renders_template(self):
""" Tests is home page is rendered properly. """
response = self.client.get(reverse('home'))
self.assertEqual(200, response.status_code)
self.assertTemplateUsed('fortuitus/fcore/home.html')
class ProfileTestCase(TestCase):
def test_profile_created(self):
""" Tests that profile is automatically created along with User. """
u = UserF.create()
p = FortuitusProfile.objects.all()[0]
self.assertEqual(u.fortuitusprofile, p)
def test_profiles_not_conflicted(self):
"""
Tests that second profile is created and not conflicted with the first
user nor his profile.
"""
u1 = UserF.create()
p1 = FortuitusProfile.objects.get(user_id=u1.pk)
u2 = UserF.create()
p2 = FortuitusProfile.objects.get(user_id=u2.pk)
self.assertNotEqual(p1, p2)
| from django.core.urlresolvers import reverse
from django.test import TestCase
from fortuitus.fcore.factories import UserF
from fortuitus.fcore.models import FortuitusProfile
class HomeViewTestCase(TestCase):
def test_renders_template(self):
""" Tests is home page is rendered properly. """
response = self.client.get(reverse('home'))
self.assertEqual(200, response.status_code)
self.assertTemplateUsed('fortuitus/fcore/home.html')
class ProfileTestCase(TestCase):
def test_profile_created(self):
""" Tests that profile is automatically created along with User. """
u = UserF.create()
p = FortuitusProfile.objects.all()[0]
self.assertEqual(u.fortuitusprofile, p)
def test_profiles_not_conflicted(self):
"""
Tests that second profile is created and not conflicted with the first
user nor his profile.
"""
u1 = UserF.create(username='u1')
p1 = FortuitusProfile.objects.get(user_id=u1.pk)
u2 = UserF.create(username='u2')
p2 = FortuitusProfile.objects.get(user_id=u2.pk)
self.assertNotEqual(p1, p2)
| Fix failing user profile test | Fix failing user profile test
| Python | mit | elegion/djangodash2012,elegion/djangodash2012 | ---
+++
@@ -25,8 +25,8 @@
Tests that second profile is created and not conflicted with the first
user nor his profile.
"""
- u1 = UserF.create()
+ u1 = UserF.create(username='u1')
p1 = FortuitusProfile.objects.get(user_id=u1.pk)
- u2 = UserF.create()
+ u2 = UserF.create(username='u2')
p2 = FortuitusProfile.objects.get(user_id=u2.pk)
self.assertNotEqual(p1, p2) |
2f44eb65e22672a894cced9c9de8d64f72d0fc39 | pyosmo/algorithm/weighted.py | pyosmo/algorithm/weighted.py | from typing import List
from pyosmo.algorithm.base import OsmoAlgorithm
from pyosmo.history.history import OsmoHistory
from pyosmo.model import TestStep
class WeightedAlgorithm(OsmoAlgorithm):
""" Weighted random algorithm """
def choose(self, history: OsmoHistory, choices: List[TestStep]) -> TestStep:
return self.random.choices(choices, weights=[c.weight for c in choices])[0]
class WeightedBalancingAlgorithm(OsmoAlgorithm):
""" Weighted algorithm which balances based on history """
def choose(self, history: OsmoHistory, choices: List[TestStep]) -> TestStep:
weights = [c.weight for c in choices]
normalized_weights = [float(i) / max(weights) for i in weights]
history_counts = [history.get_step_count(choice) for choice in choices]
if max(history_counts) == 0:
return self.random.choices(choices, weights=normalized_weights)[0]
history_normalized_weights = [float(i) / max(history_counts) for i in history_counts]
total_weights = [a - b if a - b != 0 else 0.1 for (a, b) in zip(normalized_weights, history_normalized_weights)]
return self.random.choices(choices, weights=total_weights)[0]
| from typing import List
from pyosmo.algorithm.base import OsmoAlgorithm
from pyosmo.history.history import OsmoHistory
from pyosmo.model import TestStep
class WeightedAlgorithm(OsmoAlgorithm):
""" Weighted random algorithm """
def choose(self, history: OsmoHistory, choices: List[TestStep]) -> TestStep:
return self.random.choices(choices, weights=[c.weight for c in choices])[0]
class WeightedBalancingAlgorithm(OsmoAlgorithm):
""" Weighted algorithm which balances based on history """
def choose(self, history: OsmoHistory, choices: List[TestStep]) -> TestStep:
weights = [c.weight for c in choices]
normalized_weights = [float(i) / max(weights) for i in weights]
history_counts = [history.get_step_count(choice) for choice in choices]
if max(history_counts) == 0:
return self.random.choices(choices, weights=normalized_weights)[0]
history_normalized_weights = [float(i) / max(history_counts) for i in history_counts]
total_weights = [a - b if a - b != 0 else 0.1 for (a, b) in zip(normalized_weights, history_normalized_weights)]
# Make sure that total weight is more than zero
if sum(total_weights) < 0:
temp_add = (abs(sum(total_weights)) + 0.2) / len(total_weights)
total_weights = [temp_add + x for x in total_weights]
return self.random.choices(choices, weights=total_weights)[0]
| Fix py3.9 check that total weight need to be more than zero | Fix py3.9 check that total weight need to be more than zero
| Python | mit | OPpuolitaival/pyosmo,OPpuolitaival/pyosmo | ---
+++
@@ -26,4 +26,10 @@
history_normalized_weights = [float(i) / max(history_counts) for i in history_counts]
total_weights = [a - b if a - b != 0 else 0.1 for (a, b) in zip(normalized_weights, history_normalized_weights)]
+
+ # Make sure that total weight is more than zero
+ if sum(total_weights) < 0:
+ temp_add = (abs(sum(total_weights)) + 0.2) / len(total_weights)
+ total_weights = [temp_add + x for x in total_weights]
+
return self.random.choices(choices, weights=total_weights)[0] |
c108c418935b2b5ea8ec42696a8d11f97601e552 | qual/calendars/historical.py | qual/calendars/historical.py | from datetime import date
from base import Calendar
from main import JulianCalendar
class JulianToGregorianCalendar(Calendar):
def date(self, year, month, day):
gregorian_date = date(year, month, day)
if gregorian_date < self.first_gregorian_day:
julian_date = JulianCalendar().date(year, month, day)
self.bless(julian_date)
return julian_date
return self.from_date(gregorian_date)
def bless(self, date):
date.calendar = self.__class__
class EnglishHistoricalCalendar(JulianToGregorianCalendar):
first_gregorian_day = date(1752, 9, 14)
| from datetime import date
from base import Calendar
from main import JulianCalendar
class JulianToGregorianCalendar(Calendar):
def date(self, year, month, day):
gregorian_date = date(year, month, day)
if gregorian_date < self.first_gregorian_day:
julian_date = JulianCalendar().date(year, month, day)
self.bless(julian_date)
return julian_date
return self.from_date(gregorian_date)
def bless(self, date):
date.calendar = self.__class__
class EnglishHistoricalCalendar(JulianToGregorianCalendar):
first_gregorian_day = date(1752, 9, 13)
| Change the first gregorian date in the English calendar. | Change the first gregorian date in the English calendar.
| Python | apache-2.0 | jwg4/calexicon,jwg4/qual | ---
+++
@@ -16,4 +16,4 @@
date.calendar = self.__class__
class EnglishHistoricalCalendar(JulianToGregorianCalendar):
- first_gregorian_day = date(1752, 9, 14)
+ first_gregorian_day = date(1752, 9, 13) |
7a3325a7fe5c99116587751ae58480b3b83760d1 | bokeh/charts/__init__.py | bokeh/charts/__init__.py | from __future__ import absolute_import
# defaults and constants
from .utils import DEFAULT_PALETTE
from ._chart_options import default_options as defaults
# main components
from ._chart import Chart
# operations and attributes for users to input into Charts
from ._attributes import color
from .operations import stack, blend
# builders
from .builder.line_builder import Line
from .builder.histogram_builder import Histogram
from .builder.bar_builder import Bar
from .builder.scatter_builder import Scatter
from .builder.boxplot_builder import BoxPlot
# easy access to required bokeh components
from ..models import ColumnDataSource
from ..io import (
curdoc, cursession, output_file, output_notebook, output_server, push,
reset_output, save, show, gridplot, vplot, hplot)
# Silence pyflakes
(curdoc, cursession, output_file, output_notebook, output_server, push,
reset_output, save, show, gridplot, vplot, hplot, ColumnDataSource)
| from __future__ import absolute_import
# defaults and constants
from .utils import DEFAULT_PALETTE
from ._chart_options import default_options as defaults
# main components
from ._chart import Chart
# operations and attributes for users to input into Charts
from ._attributes import color, marker
from .operations import stack, blend
# builders
from .builder.line_builder import Line
from .builder.histogram_builder import Histogram
from .builder.bar_builder import Bar
from .builder.scatter_builder import Scatter
from .builder.boxplot_builder import BoxPlot
# easy access to required bokeh components
from ..models import ColumnDataSource
from ..io import (
curdoc, cursession, output_file, output_notebook, output_server, push,
reset_output, save, show, gridplot, vplot, hplot)
# Silence pyflakes
(curdoc, cursession, output_file, output_notebook, output_server, push,
reset_output, save, show, gridplot, vplot, hplot, ColumnDataSource)
| Add marker attr spec function to chart import. | Add marker attr spec function to chart import.
| Python | bsd-3-clause | aavanian/bokeh,philippjfr/bokeh,KasperPRasmussen/bokeh,phobson/bokeh,philippjfr/bokeh,ptitjano/bokeh,DuCorey/bokeh,ericmjl/bokeh,ptitjano/bokeh,msarahan/bokeh,justacec/bokeh,maxalbert/bokeh,draperjames/bokeh,timsnyder/bokeh,ChinaQuants/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,DuCorey/bokeh,DuCorey/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,jakirkham/bokeh,aiguofer/bokeh,clairetang6/bokeh,clairetang6/bokeh,dennisobrien/bokeh,aiguofer/bokeh,aiguofer/bokeh,rs2/bokeh,htygithub/bokeh,aavanian/bokeh,Karel-van-de-Plassche/bokeh,azjps/bokeh,rs2/bokeh,rs2/bokeh,rs2/bokeh,azjps/bokeh,ChinaQuants/bokeh,ChinaQuants/bokeh,KasperPRasmussen/bokeh,Karel-van-de-Plassche/bokeh,timsnyder/bokeh,ericmjl/bokeh,philippjfr/bokeh,jakirkham/bokeh,ericmjl/bokeh,schoolie/bokeh,mindriot101/bokeh,bokeh/bokeh,percyfal/bokeh,KasperPRasmussen/bokeh,dennisobrien/bokeh,htygithub/bokeh,aavanian/bokeh,maxalbert/bokeh,timsnyder/bokeh,azjps/bokeh,mindriot101/bokeh,clairetang6/bokeh,percyfal/bokeh,rs2/bokeh,timsnyder/bokeh,gpfreitas/bokeh,justacec/bokeh,jakirkham/bokeh,quasiben/bokeh,aavanian/bokeh,maxalbert/bokeh,htygithub/bokeh,clairetang6/bokeh,gpfreitas/bokeh,draperjames/bokeh,bokeh/bokeh,bokeh/bokeh,ericmjl/bokeh,ericmjl/bokeh,phobson/bokeh,schoolie/bokeh,DuCorey/bokeh,mindriot101/bokeh,aiguofer/bokeh,gpfreitas/bokeh,azjps/bokeh,draperjames/bokeh,philippjfr/bokeh,azjps/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,ptitjano/bokeh,phobson/bokeh,percyfal/bokeh,draperjames/bokeh,mindriot101/bokeh,jakirkham/bokeh,DuCorey/bokeh,msarahan/bokeh,percyfal/bokeh,msarahan/bokeh,justacec/bokeh,bokeh/bokeh,philippjfr/bokeh,percyfal/bokeh,dennisobrien/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,Karel-van-de-Plassche/bokeh,htygithub/bokeh,ChinaQuants/bokeh,stonebig/bokeh,quasiben/bokeh,bokeh/bokeh,draperjames/bokeh,aavanian/bokeh,stonebig/bokeh,quasiben/bokeh,aiguofer/bokeh,justacec/bokeh,stonebig/bokeh,ptitjano/bokeh,schoolie/bokeh,stonebig/bokeh,maxalbert/bokeh,KasperPRasmussen/b
okeh,timsnyder/bokeh,schoolie/bokeh,dennisobrien/bokeh,schoolie/bokeh,msarahan/bokeh,phobson/bokeh | ---
+++
@@ -8,7 +8,7 @@
from ._chart import Chart
# operations and attributes for users to input into Charts
-from ._attributes import color
+from ._attributes import color, marker
from .operations import stack, blend
# builders |
e949f5cb8ad2a8e8642ce9307cb8eedf3caf1254 | src/pyhmsa/fileformat/xmlhandler/header.py | src/pyhmsa/fileformat/xmlhandler/header.py | #!/usr/bin/env python
"""
================================================================================
:mod:`header` -- XML handler for header
================================================================================
.. module:: header
:synopsis: XML handler for header
.. inheritance-diagram:: header
"""
# Standard library modules.
import xml.etree.ElementTree as etree
# Third party modules.
# Local modules.
from pyhmsa.spec.header import Header
from pyhmsa.fileformat.xmlhandler.xmlhandler import _XMLHandler
# Globals and constants variables.
class HeaderXMLHandler(_XMLHandler):
def can_parse(self, element):
return element.tag == 'Header'
def parse(self, element):
obj = self._parse_parameter(element, Header)
for subelement in element.iter():
name = subelement.tag
if name in obj:
continue # already parsed
obj[name] = subelement.text
return obj
def can_convert(self, obj):
return isinstance(obj, Header)
def convert(self, obj):
element = self._convert_parameter(obj, 'Header')
for name, value in obj._extras.items():
subelement = etree.Element(name)
subelement.text = str(value)
element.append(subelement)
return element
| #!/usr/bin/env python
"""
================================================================================
:mod:`header` -- XML handler for header
================================================================================
.. module:: header
:synopsis: XML handler for header
.. inheritance-diagram:: header
"""
# Standard library modules.
import xml.etree.ElementTree as etree
# Third party modules.
# Local modules.
from pyhmsa.spec.header import Header
from pyhmsa.fileformat.xmlhandler.xmlhandler import _XMLHandler
# Globals and constants variables.
class HeaderXMLHandler(_XMLHandler):
def can_parse(self, element):
return element.tag == 'Header'
def parse(self, element):
obj = self._parse_parameter(element, Header)
for subelement in element:
name = subelement.tag
if name in obj:
continue # already parsed
obj[name] = subelement.text
return obj
def can_convert(self, obj):
return isinstance(obj, Header)
def convert(self, obj):
element = self._convert_parameter(obj, 'Header')
for name, value in obj._extras.items():
subelement = etree.Element(name)
subelement.text = str(value)
element.append(subelement)
return element
| Fix iteration was over whole element instead of only over subelements. | Fix iteration was over whole element instead of only over subelements.
| Python | mit | pyhmsa/pyhmsa | ---
+++
@@ -30,7 +30,7 @@
def parse(self, element):
obj = self._parse_parameter(element, Header)
- for subelement in element.iter():
+ for subelement in element:
name = subelement.tag
if name in obj:
continue # already parsed |
b438e3858910eee4f24a5f33858fb039240750cd | get_data_from_twitter.py | get_data_from_twitter.py | # -*- coding: UTF-8 -*-
import numpy
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import json
import config
#Much of this code comes from http://adilmoujahid.com/posts/2014/07/twitter-analytics/
class StdOutListener(StreamListener):
def on_data(self, data_str):
data = json.loads(data_str)
if len(data['entities']['urls']) != 0:
newdata = {'created_at' : data['created_at'], 'text' : data['text'],
'urls' : [url['expanded_url'] for url in data['entities']['urls'] if url['url'] != '' ] }
if len(newdata['urls']) != 0:
print json.dumps(newdata)
return True
def on_error(self, status):
print status
l = StdOutListener()
auth = OAuthHandler(config.consumer_key, config.consumer_secret)
auth.set_access_token(config.access_token, config.access_token_secret)
stream = Stream(auth, l)
stream.filter(track=['#Trump2016', '#Hillary2016'])
| # -*- coding: UTF-8 -*-
import numpy
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
import json
import config
#Much of this code comes from http://adilmoujahid.com/posts/2014/07/twitter-analytics/
class StdOutListener(StreamListener):
def on_data(self, data_str):
data = json.loads(data_str)
if len(data['entities']['urls']) != 0:
newdata = {'created_at' : data['created_at'], 'text' : data['text'],
'hashtags' : [hashtag['text'] for hashtag in data['entities']['hashtags'] ],
'urls' : [url['expanded_url'] for url in data['entities']['urls'] if url['url'] != '' ] }
if len(newdata['urls']) != 0:
print json.dumps(newdata)
return True
def on_error(self, status):
print status
l = StdOutListener()
auth = OAuthHandler(config.consumer_key, config.consumer_secret)
auth.set_access_token(config.access_token, config.access_token_secret)
stream = Stream(auth, l)
stream.filter(track=['#Trump2016', '#Hillary2016'])
| Add support for pulling hash tags | Add support for pulling hash tags
| Python | mpl-2.0 | aDataAlchemist/election-tweets | ---
+++
@@ -14,6 +14,7 @@
data = json.loads(data_str)
if len(data['entities']['urls']) != 0:
newdata = {'created_at' : data['created_at'], 'text' : data['text'],
+ 'hashtags' : [hashtag['text'] for hashtag in data['entities']['hashtags'] ],
'urls' : [url['expanded_url'] for url in data['entities']['urls'] if url['url'] != '' ] }
if len(newdata['urls']) != 0:
print json.dumps(newdata) |
bb6599477ffe696a5d37a781b33f02f5623dc1a2 | eve_swagger/swagger.py | eve_swagger/swagger.py | # -*- coding: utf-8 -*-
"""
eve-swagger.swagger
~~~~~~~~~~~~~~~~~~~
swagger.io extension for Eve-powered REST APIs.
:copyright: (c) 2015 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from collections import OrderedDict
from flask import Blueprint, jsonify
from objects import info, host, base_path, schemes, consumes, produces, \
definitions, parameters, responses, security_definitions, security, \
tags, external_docs
from paths import paths
swagger = Blueprint('eve_swagger', __name__)
@swagger.route('/api-docs')
def index():
root = OrderedDict()
root['swagger'] = '2.0'
node(root, 'info', info())
node(root, 'host', host())
node(root, 'basePath', base_path())
node(root, 'schemes', schemes())
node(root, 'consumes', consumes())
node(root, 'produces', produces())
node(root, 'paths', paths())
node(root, 'definitions', definitions())
node(root, 'parameters', parameters())
node(root, 'responses', responses())
node(root, 'securityDefinitions', security_definitions())
node(root, 'security', security())
node(root, 'tags', tags())
node(root, 'externalDocs', external_docs())
return jsonify(root)
def node(parent, key, value):
if value:
parent[key] = value
| # -*- coding: utf-8 -*-
"""
eve-swagger.swagger
~~~~~~~~~~~~~~~~~~~
swagger.io extension for Eve-powered REST APIs.
:copyright: (c) 2015 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from collections import OrderedDict
from flask import Blueprint, jsonify
from objects import info, host, base_path, schemes, consumes, produces, \
definitions, parameters, responses, security_definitions, security, \
tags, external_docs
from paths import paths
swagger = Blueprint('eve_swagger', __name__)
@swagger.route('/api-docs')
def index():
def node(parent, key, value):
if value:
parent[key] = value
root = OrderedDict()
root['swagger'] = '2.0'
node(root, 'info', info())
node(root, 'host', host())
node(root, 'basePath', base_path())
node(root, 'schemes', schemes())
node(root, 'consumes', consumes())
node(root, 'produces', produces())
node(root, 'paths', paths())
node(root, 'definitions', definitions())
node(root, 'parameters', parameters())
node(root, 'responses', responses())
node(root, 'securityDefinitions', security_definitions())
node(root, 'security', security())
node(root, 'tags', tags())
node(root, 'externalDocs', external_docs())
return jsonify(root)
| Refactor node() into a closure | Refactor node() into a closure
| Python | bsd-3-clause | nicolaiarocci/eve-swagger | ---
+++
@@ -21,6 +21,10 @@
@swagger.route('/api-docs')
def index():
+ def node(parent, key, value):
+ if value:
+ parent[key] = value
+
root = OrderedDict()
root['swagger'] = '2.0'
node(root, 'info', info())
@@ -39,8 +43,3 @@
node(root, 'externalDocs', external_docs())
return jsonify(root)
-
-
-def node(parent, key, value):
- if value:
- parent[key] = value |
5c8cd5be944da9d765f71e62c42e1525736b14a1 | tests/tools.py | tests/tools.py | # coding=utf-8
"""
Test tools required by multiple suites.
"""
import contextlib
import shutil
import subprocess
import tempfile
from devpi_builder import devpi
@contextlib.contextmanager
def devpi_server(port=2414):
server_dir = tempfile.mkdtemp()
try:
subprocess.check_output(['devpi-server', '--start', '--serverdir={}'.format(server_dir), '--port={}'.format(port)], stderr=subprocess.STDOUT)
try:
yield 'http://localhost:{}'.format(port)
finally:
subprocess.check_output(['devpi-server', '--stop', '--serverdir={}'.format(server_dir)], stderr=subprocess.STDOUT)
finally:
shutil.rmtree(server_dir)
@contextlib.contextmanager
def devpi_index(server_url, user, index):
"""
Creates the given user and index, and cleans it afterwards.
Yields of tuple of index-url and password. The index is created without an upstream.
"""
password = 'foo'
with devpi.Client(server_url) as devpi_client:
devpi_client._execute('user', '-c', user, 'password=' + password)
devpi_client._execute('login', user, '--password=' + password)
devpi_client._execute('index', '-c', 'wheels', 'bases=')
yield '{}/{}/{}'.format(server_url, user, index), password
devpi_client._execute('index', '--delete', '/{}/{}'.format(user, index))
devpi_client._execute('user', user, '--delete')
| # coding=utf-8
"""
Test tools required by multiple suites.
"""
import contextlib
import shutil
import subprocess
import tempfile
from devpi_builder import devpi
@contextlib.contextmanager
def devpi_server(port=2414):
server_dir = tempfile.mkdtemp()
try:
subprocess.check_output(['devpi-server', '--start', '--serverdir={}'.format(server_dir), '--port={}'.format(port)], stderr=subprocess.STDOUT)
try:
yield 'http://localhost:{}'.format(port)
finally:
subprocess.check_output(['devpi-server', '--stop', '--serverdir={}'.format(server_dir)], stderr=subprocess.STDOUT)
finally:
shutil.rmtree(server_dir)
@contextlib.contextmanager
def devpi_index(server_url, user, index):
"""
Creates the given user and index, and cleans it afterwards.
Yields of tuple of index-url and password. The index is created without an upstream.
"""
password = 'foo'
with devpi.Client(server_url) as devpi_client:
devpi_client._execute('user', '-c', user, 'password=' + password)
devpi_client._execute('login', user, '--password=' + password)
devpi_client._execute('index', '-c', index, 'bases=')
yield '{}/{}/{}'.format(server_url, user, index), password
devpi_client._execute('index', '--delete', '/{}/{}'.format(user, index))
devpi_client._execute('user', user, '--delete')
| Fix name of index created for tests. | Fix name of index created for tests.
| Python | bsd-3-clause | tylerdave/devpi-builder | ---
+++
@@ -36,7 +36,7 @@
with devpi.Client(server_url) as devpi_client:
devpi_client._execute('user', '-c', user, 'password=' + password)
devpi_client._execute('login', user, '--password=' + password)
- devpi_client._execute('index', '-c', 'wheels', 'bases=')
+ devpi_client._execute('index', '-c', index, 'bases=')
yield '{}/{}/{}'.format(server_url, user, index), password
|
d37f9646b13df624f04050a63d34b3d33e9e6e9e | python/matasano/set1/c8.py | python/matasano/set1/c8.py | from matasano.util.converters import hex_to_bytestr
from Crypto.Cipher import AES
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
for line in chal_file:
ct = hex_to_bytestr(line[:-1])
for i in range(0, len(ct), 16):
for j in range(i+16, len(ct), 16):
if ct[i:i+16] == ct[j:j+16]:
print("Block collision found in ciphertext: {} Block {} == Block {}".format(line, i//16, j//16))
break
else:
continue
break
else:
continue
break
| from matasano.util.converters import hex_to_bytestr
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
coll_count = {}
for idx, line in enumerate(chal_file):
count = 0
ct = line[:-1]
for i in range(0, len(ct), 32):
for j in range(i+32, len(ct), 32):
if ct[i:i+32] == ct[j:j+32]:
print("Block collision found in ciphertext: {} Block {} == Block {}".format(idx, i//16, j//16))
count += 1
coll_count[idx] = count
most_coll = max(coll_count, key=coll_count.get)
print("Ciphertext {} had the most collisions with {}".format(most_coll, coll_count[most_coll]))
| Improve the code, return most collisions. Work on hex strings. | Improve the code, return most collisions. Work on hex strings.
| Python | mit | TheLunchtimeAttack/matasano-challenges,TheLunchtimeAttack/matasano-challenges | ---
+++
@@ -1,21 +1,21 @@
from matasano.util.converters import hex_to_bytestr
-from Crypto.Cipher import AES
if __name__ == "__main__":
chal_file = open("matasano/data/c8.txt", 'r');
- for line in chal_file:
- ct = hex_to_bytestr(line[:-1])
- for i in range(0, len(ct), 16):
- for j in range(i+16, len(ct), 16):
- if ct[i:i+16] == ct[j:j+16]:
- print("Block collision found in ciphertext: {} Block {} == Block {}".format(line, i//16, j//16))
- break
- else:
- continue
- break
- else:
- continue
- break
+ coll_count = {}
+ for idx, line in enumerate(chal_file):
+ count = 0
+ ct = line[:-1]
+ for i in range(0, len(ct), 32):
+ for j in range(i+32, len(ct), 32):
+ if ct[i:i+32] == ct[j:j+32]:
+ print("Block collision found in ciphertext: {} Block {} == Block {}".format(idx, i//16, j//16))
+ count += 1
+ coll_count[idx] = count
+
+ most_coll = max(coll_count, key=coll_count.get)
+ print("Ciphertext {} had the most collisions with {}".format(most_coll, coll_count[most_coll]))
+ |
02d1b76067a8c3b2de9abc09cd841fe8b8bd7605 | example/app/integrations/fps_integration.py | example/app/integrations/fps_integration.py | from billing.integrations.amazon_fps_integration import AmazonFpsIntegration as Integration
from django.core.urlresolvers import reverse
import urlparse
class FpsIntegration(Integration):
def transaction(self, request):
"""Ideally at this method, you will check the
caller reference against a user id or uniquely
identifiable attribute (if you are already not
using it as the caller reference) and the type
of transaction (either pay, reserve etc). For
the sake of the example, we assume all the users
get charged $100"""
request_url = request.build_absolute_uri()
parsed_url = urlparse.urlparse(request_url)
query = parsed_url.query
dd = dict(map(lambda x: x.split("="), query.split("&")))
resp = self.purchase(100, dd)
return "%s?status=%s" %(reverse("app_offsite_amazon_fps"),
resp["status"])
| from billing.integrations.amazon_fps_integration import AmazonFpsIntegration as Integration
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
import urlparse
class FpsIntegration(Integration):
def transaction(self, request):
"""Ideally at this method, you will check the
caller reference against a user id or uniquely
identifiable attribute (if you are already not
using it as the caller reference) and the type
of transaction (either pay, reserve etc). For
the sake of the example, we assume all the users
get charged $100"""
request_url = request.build_absolute_uri()
parsed_url = urlparse.urlparse(request_url)
query = parsed_url.query
dd = dict(map(lambda x: x.split("="), query.split("&")))
resp = self.purchase(100, dd)
return HttpResponseRedirect("%s?status=%s" %(reverse("app_offsite_amazon_fps"),
resp["status"]))
| Use the HttpResponseRedirect for redirection. | Use the HttpResponseRedirect for redirection.
| Python | bsd-3-clause | biddyweb/merchant,spookylukey/merchant,spookylukey/merchant,digideskio/merchant,mjrulesamrat/merchant,agiliq/merchant,SimpleTax/merchant,mjrulesamrat/merchant,biddyweb/merchant,SimpleTax/merchant,agiliq/merchant,digideskio/merchant | ---
+++
@@ -1,5 +1,6 @@
from billing.integrations.amazon_fps_integration import AmazonFpsIntegration as Integration
from django.core.urlresolvers import reverse
+from django.http import HttpResponseRedirect
import urlparse
class FpsIntegration(Integration):
@@ -16,5 +17,5 @@
query = parsed_url.query
dd = dict(map(lambda x: x.split("="), query.split("&")))
resp = self.purchase(100, dd)
- return "%s?status=%s" %(reverse("app_offsite_amazon_fps"),
- resp["status"])
+ return HttpResponseRedirect("%s?status=%s" %(reverse("app_offsite_amazon_fps"),
+ resp["status"])) |
886fac0476d05806c5d396f0740bc24f3fa343ed | rslinac/pkcli/beam_solver.py | rslinac/pkcli/beam_solver.py | import rslinac
def run(ini_filename, input_filename, output_filename):
rslinac.run_beam_solver(ini_filename, input_filename, output_filename)
| import rslinac
from argh import arg
@arg('ini', help='path configuration file in INI format')
@arg('input', help='path to file with input data')
@arg('output', help='path to file to write output data')
def run(ini, input, output):
"""runs the beam solver"""
rslinac.run_beam_solver(ini, input, output)
| Add documentation to cli command arguments | Add documentation to cli command arguments
| Python | apache-2.0 | elventear/rslinac,radiasoft/rslinac,radiasoft/rslinac,elventear/rslinac,elventear/rslinac,radiasoft/rslinac,radiasoft/rslinac,radiasoft/rslinac,elventear/rslinac,elventear/rslinac,elventear/rslinac | ---
+++
@@ -1,4 +1,9 @@
import rslinac
+from argh import arg
-def run(ini_filename, input_filename, output_filename):
- rslinac.run_beam_solver(ini_filename, input_filename, output_filename)
+@arg('ini', help='path configuration file in INI format')
+@arg('input', help='path to file with input data')
+@arg('output', help='path to file to write output data')
+def run(ini, input, output):
+ """runs the beam solver"""
+ rslinac.run_beam_solver(ini, input, output) |
077016fbe6ee17c8eb3528b957b05eb4682b8d26 | scrapi/processing/elastic_search.py | scrapi/processing/elastic_search.py | import json
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
logger = logging.getLogger(__name__)
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
normalized['dateUpdated'] = self.version_dateUpdated(normalized)
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
def version_dateUpdated(self, normalized):
old_doc = es.get_source(
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
ignore=[404]
)
logger.info(json.dumps(old_doc, indent=4))
return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated']
| import json
import logging
from elasticsearch import Elasticsearch
from scrapi import settings
from scrapi.processing.base import BaseProcessor
es = Elasticsearch(
settings.ELASTIC_URI,
request_timeout=settings.ELASTIC_TIMEOUT
)
logging.getLogger('elasticsearch').setLevel(logging.WARN)
logging.getLogger('elasticsearch.trace').setLevel(logging.WARN)
logging.getLogger('urllib3').setLevel(logging.WARN)
logging.getLogger('requests').setLevel(logging.WARN)
es.cluster.health(wait_for_status='yellow')
logger = logging.getLogger(__name__)
class ElasticsearchProcessor(BaseProcessor):
NAME = 'elasticsearch'
def process_normalized(self, raw_doc, normalized):
data = {
key: value for key, value in normalized.attributes.items()
if key in settings.FRONTEND_KEYS
}
normalized['dateUpdated'] = self.version_dateUpdated(normalized)
es.index(
body=data,
refresh=True,
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
def version_dateUpdated(self, normalized):
try:
old_doc = es.get_source(
index='share',
doc_type=normalized['source'],
id=normalized['id']['serviceID'],
)
except elasticsearch.IndexMissingException:
return normalized['dateUpdated']
return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated']
| Handle 404s due to index not existing when doing versioning | Handle 404s due to index not existing when doing versioning
| Python | apache-2.0 | jeffreyliu3230/scrapi,ostwald/scrapi,felliott/scrapi,felliott/scrapi,fabianvf/scrapi,erinspace/scrapi,fabianvf/scrapi,erinspace/scrapi,mehanig/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,icereval/scrapi,alexgarciac/scrapi,CenterForOpenScience/scrapi | ---
+++
@@ -40,13 +40,13 @@
)
def version_dateUpdated(self, normalized):
- old_doc = es.get_source(
- index='share',
- doc_type=normalized['source'],
- id=normalized['id']['serviceID'],
- ignore=[404]
- )
-
- logger.info(json.dumps(old_doc, indent=4))
+ try:
+ old_doc = es.get_source(
+ index='share',
+ doc_type=normalized['source'],
+ id=normalized['id']['serviceID'],
+ )
+ except elasticsearch.IndexMissingException:
+ return normalized['dateUpdated']
return old_doc['dateUpdated'] if old_doc else normalized['dateUpdated'] |
646bebdef900869c7a53997d6e9ab79150c0257c | server/apps/webterminal/handlers.py | server/apps/webterminal/handlers.py | import tornado.web
import tornado.escape
import json
import time
import os
import settings as global_settings
from lib.handlers.base import BaseHandler
class EmbedTerminalHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
if not self.get_secure_cookie('gateone_ssl'):
self.set_secure_cookie('gateone_ssl','accepted')
self.redirect(global_settings.GATEONE_STATIC_URL+'/static/accept_certificate.html')
else:
secret = global_settings.TERMINAL_SECRET
authobj = {
'api_key': global_settings.TERMINAL_API_KEY,
'upn': self.get_current_user(),
'timestamp': str(int(time.time())),
'signature': "",
'signature_method': 'HMAC-SHA1',
'api_version': '1.0'
}
authobj['signature'] = self.create_signature(secret, authobj['api_key'], authobj['upn'], authobj['timestamp'])
ctx = {
'authobj': authobj,
'gateone_url': global_settings.GATEONE_URL,
'gateone_origins_url': global_settings.GATEONE_ORIGINS_URL
}
self.write(ctx)
@tornado.web.authenticated
def create_signature(self, secret, *parts):
import hmac, hashlib
hash = hmac.new(secret, digestmod=hashlib.sha1)
for part in parts:
hash.update(str(part))
return hash.hexdigest()
| import tornado.web
import tornado.escape
import json
import time
import os
import settings as global_settings
from lib.handlers.base import BaseHandler
class EmbedTerminalHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
if not self.get_secure_cookie('gateone_ssl'):
self.set_secure_cookie('gateone_ssl','accepted')
self.redirect(global_settings.GATEONE_STATIC_URL+'/static/accept_certificate.html')
else:
secret = global_settings.TERMINAL_SECRET
authobj = {
'api_key': global_settings.TERMINAL_API_KEY,
'upn': self.get_current_user(),
'timestamp': str(int(time.time() * 1000)),
'signature': "",
'signature_method': 'HMAC-SHA1',
'api_version': '1.0'
}
authobj['signature'] = self.create_signature(secret, authobj['api_key'], authobj['upn'], authobj['timestamp'])
ctx = {
'authobj': authobj,
'gateone_url': global_settings.GATEONE_URL,
'gateone_origins_url': global_settings.GATEONE_ORIGINS_URL
}
self.write(ctx)
@tornado.web.authenticated
def create_signature(self, secret, *parts):
import hmac, hashlib
hash = hmac.new(secret, digestmod=hashlib.sha1)
for part in parts:
hash.update(str(part))
return hash.hexdigest()
| Revert previous change to auth timestamp. | Revert previous change to auth timestamp. | Python | mit | SandstoneHPC/sandstone-ide,SandstoneHPC/OIDE,ResearchComputing/OIDE,ResearchComputing/OIDE,SandstoneHPC/OIDE,SandstoneHPC/sandstone-ide,ResearchComputing/OIDE,ResearchComputing/OIDE,SandstoneHPC/sandstone-ide,SandstoneHPC/OIDE | ---
+++
@@ -22,7 +22,7 @@
authobj = {
'api_key': global_settings.TERMINAL_API_KEY,
'upn': self.get_current_user(),
- 'timestamp': str(int(time.time())),
+ 'timestamp': str(int(time.time() * 1000)),
'signature': "",
'signature_method': 'HMAC-SHA1',
'api_version': '1.0' |
36288cf5357d58b0989b090965fd231bb01137ed | imager/profiles/tests.py | imager/profiles/tests.py | from django.test import TestCase
import factory
from django.contrib.auth.models import User
from profiles.models import ImagerProfile
class UserFactory(factory.django.DjangoModelFactory):
class Meta:
model = User
django_get_or_create = ('username',)
username = 'john'
class Test_ImagerProfile(TestCase):
def setUp(self):
self.usertest = UserFactory()
def test_create(self):
assert self.usertest.username == 'john'
assert type(self.usertest) is User
assert type(self.usertest.profile) is ImagerProfile
def test_delete(self):
self.sally = UserFactory(username='sally')
assert self.sally.profile in ImagerProfile.objects.all()
self.sally.delete()
assert self.sally.profile not in ImagerProfile.objects.all()
def test_active(self):
assert self.usertest.profile in ImagerProfile.active.all()
def test_inactive(self):
self.usertest.is_active = False
self.usertest.save()
print ImagerProfile.active.all()
assert self.usertest.profile not in ImagerProfile.active.all()
def test_reactivate(self):
self.usertest.is_active = True
self.usertest.save()
assert self.usertest.is_active is True
| from django.test import TestCase
import factory
from django.contrib.auth.models import User
from profiles.models import ImagerProfile
class UserFactory(factory.django.DjangoModelFactory):
"""Creates a test user not: non permante to db"""
class Meta:
model = User
django_get_or_create = ('username',)
username = 'john'
class Test_ImagerProfile(TestCase):
def setUp(self):
"""Creats the User defined in UserFactory"""
self.usertest = UserFactory()
def test_create(self):
"""Test that a profile is created with a User creation"""
assert self.usertest.username == 'john'
type(self.usertest) is User
assert type(self.usertest.profile) is ImagerProfile
def test_delete(self):
"""Test that a profile is delted when a User is delted"""
self.sally = UserFactory(username='sally')
assert self.sally.profile in ImagerProfile.objects.all()
self.sally.delete()
assert self.sally.profile not in ImagerProfile.objects.all()
def test_active(self):
"""Test that a newly created profile is defaulting to active"""
assert self.usertest.profile in ImagerProfile.active.all()
def test_inactive(self):
"""Test that changing a user to inactive also makes the profile
inactive"""
self.usertest.is_active = False
self.usertest.save()
ImagerProfile.active.all()
assert self.usertest.profile not in ImagerProfile.active.all()
def test_reactivate(self):
"""Test that changing a user to active also makes the profile
active"""
self.usertest.is_active = True
self.usertest.save()
assert self.usertest.is_active is True
| Update docstrings for test file | Update docstrings for test file
| Python | mit | edpark13/django-imager | ---
+++
@@ -5,41 +5,48 @@
class UserFactory(factory.django.DjangoModelFactory):
+ """Creates a test user not: non permante to db"""
class Meta:
model = User
django_get_or_create = ('username',)
username = 'john'
+
class Test_ImagerProfile(TestCase):
def setUp(self):
+ """Creats the User defined in UserFactory"""
self.usertest = UserFactory()
def test_create(self):
+ """Test that a profile is created with a User creation"""
assert self.usertest.username == 'john'
- assert type(self.usertest) is User
+ type(self.usertest) is User
assert type(self.usertest.profile) is ImagerProfile
def test_delete(self):
+ """Test that a profile is delted when a User is delted"""
self.sally = UserFactory(username='sally')
assert self.sally.profile in ImagerProfile.objects.all()
self.sally.delete()
assert self.sally.profile not in ImagerProfile.objects.all()
def test_active(self):
+ """Test that a newly created profile is defaulting to active"""
assert self.usertest.profile in ImagerProfile.active.all()
def test_inactive(self):
+ """Test that changing a user to inactive also makes the profile
+ inactive"""
self.usertest.is_active = False
self.usertest.save()
- print ImagerProfile.active.all()
+ ImagerProfile.active.all()
assert self.usertest.profile not in ImagerProfile.active.all()
def test_reactivate(self):
+ """Test that changing a user to active also makes the profile
+ active"""
self.usertest.is_active = True
self.usertest.save()
assert self.usertest.is_active is True
-
-
- |
d142bed6916d8b34509c12623b4802eca9206695 | tests/test_ab_testing.py | tests/test_ab_testing.py | from . import TheInternetTestCase
from helium.api import go_to, S, get_driver
class AbTestingTest(TheInternetTestCase):
def get_page(self):
return "http://the-internet.herokuapp.com/abtest"
def test_ab_variates(self):
variation = S("h3")
first_variation = variation.web_element.text
self.assertIn(
first_variation, [u"A/B Test Variation 1", u"A/B Test Control"]
)
get_driver().delete_all_cookies()
go_to("http://the-internet.herokuapp.com/abtest")
variation = S("h3")
second_variation = variation.web_element.text
self.assertIn(
second_variation, [u"A/B Test Variation 1", u"A/B Test Control"]
)
self.assertNotEqual(first_variation, second_variation) | from . import TheInternetTestCase
from helium.api import go_to, S, get_driver
class AbTestingTest(TheInternetTestCase):
def get_page(self):
return "http://the-internet.herokuapp.com/abtest"
def test_ab_variates(self):
header = S("h3")
first_variation = header.web_element.text
self.assertIn(
first_variation, [u"A/B Test Variation 1", u"A/B Test Control"]
)
second_variation = first_variation
while second_variation == first_variation:
get_driver().delete_all_cookies()
go_to("http://the-internet.herokuapp.com/abtest")
header = S("h3")
second_variation = header.web_element.text
self.assertIn(
second_variation, [u"A/B Test Variation 1", u"A/B Test Control"]
)
self.assertNotEqual(first_variation, second_variation) | Make the AB test case more stable. | Make the AB test case more stable.
| Python | mit | bugfree-software/the-internet-solution-python | ---
+++
@@ -5,16 +5,18 @@
def get_page(self):
return "http://the-internet.herokuapp.com/abtest"
def test_ab_variates(self):
- variation = S("h3")
- first_variation = variation.web_element.text
+ header = S("h3")
+ first_variation = header.web_element.text
self.assertIn(
first_variation, [u"A/B Test Variation 1", u"A/B Test Control"]
)
- get_driver().delete_all_cookies()
- go_to("http://the-internet.herokuapp.com/abtest")
- variation = S("h3")
- second_variation = variation.web_element.text
- self.assertIn(
- second_variation, [u"A/B Test Variation 1", u"A/B Test Control"]
- )
+ second_variation = first_variation
+ while second_variation == first_variation:
+ get_driver().delete_all_cookies()
+ go_to("http://the-internet.herokuapp.com/abtest")
+ header = S("h3")
+ second_variation = header.web_element.text
+ self.assertIn(
+ second_variation, [u"A/B Test Variation 1", u"A/B Test Control"]
+ )
self.assertNotEqual(first_variation, second_variation) |
3d95a986538dce6476962b46b0075303f2055311 | comics/core/middleware.py | comics/core/middleware.py | import re
from django.utils.html import strip_spaces_between_tags
from django.conf import settings
RE_MULTISPACE = re.compile(r'\s{2,}')
RE_NEWLINE = re.compile(r'\n')
class MinifyHTMLMiddleware(object):
def process_response(self, request, response):
if 'text/html' in response['Content-Type'] and settings.COMPRESS_HTML:
response.content = strip_spaces_between_tags(
response.content.strip())
response.content = RE_MULTISPACE.sub(" ", response.content)
response.content = RE_NEWLINE.sub("", response.content)
return response
| import re
from django.utils.html import strip_spaces_between_tags
from django.conf import settings
RE_MULTISPACE = re.compile(r'\s{2,}')
RE_NEWLINE = re.compile(r'\n')
class MinifyHTMLMiddleware(object):
def process_response(self, request, response):
if 'text/html' in response['Content-Type'] and settings.COMPRESS_HTML:
response.content = strip_spaces_between_tags(
response.content.strip())
response.content = RE_MULTISPACE.sub(' ', response.content)
response.content = RE_NEWLINE.sub(' ', response.content)
return response
| Replace newlines with a single space instead of nothing when minifying HTML | Replace newlines with a single space instead of nothing when minifying HTML
| Python | agpl-3.0 | klette/comics,jodal/comics,jodal/comics,datagutten/comics,klette/comics,klette/comics,datagutten/comics,datagutten/comics,datagutten/comics,jodal/comics,jodal/comics | ---
+++
@@ -11,6 +11,6 @@
if 'text/html' in response['Content-Type'] and settings.COMPRESS_HTML:
response.content = strip_spaces_between_tags(
response.content.strip())
- response.content = RE_MULTISPACE.sub(" ", response.content)
- response.content = RE_NEWLINE.sub("", response.content)
+ response.content = RE_MULTISPACE.sub(' ', response.content)
+ response.content = RE_NEWLINE.sub(' ', response.content)
return response |
8a73d31a9bbff831be3e92b73ddb0841e61b3457 | reviewboard/admin/tests.py | reviewboard/admin/tests.py | from django.conf import settings
from django.test import TestCase
from reviewboard.admin import checks
class UpdateTests(TestCase):
"""Tests for update required pages"""
def tearDown(self):
# Make sure we don't break further tests by resetting this fully.
checks.reset_check_cache()
def testManualUpdatesRequired(self):
"""Testing check_updates_required with valid configuration"""
# NOTE: This is assuming the install is fine. It should be given
# that we set things like the uploaded path correctly to
# a known good directory before starting unit tests.
updates_required = checks.check_updates_required()
self.assertEqual(len(updates_required), 0)
def testManualUpdatesRequiredBadUpload(self):
"""Testing check_updates_required with a bad upload directory"""
old_media_root = settings.MEDIA_ROOT
settings.MEDIA_ROOT = "/"
checks.reset_check_cache()
updates_required = checks.check_updates_required()
settings.MEDIA_ROOT = old_media_root
self.assertEqual(len(updates_required), 1)
url, data = updates_required[0]
self.assertEqual(url, "admin/manual-updates/media-upload-dir.html")
response = self.client.get("/")
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "admin/manual_updates_required.html")
| from django.conf import settings
from django.test import TestCase
from reviewboard.admin import checks
class UpdateTests(TestCase):
"""Tests for update required pages"""
def tearDown(self):
# Make sure we don't break further tests by resetting this fully.
checks.reset_check_cache()
def testManualUpdatesRequired(self):
"""Testing check_updates_required with valid configuration"""
# NOTE: This is assuming the install is fine. It should be given
# that we set things like the uploaded path correctly to
# a known good directory before starting unit tests.
updates_required = checks.check_updates_required()
self.assertEqual(len(updates_required), 0)
def testManualUpdatesRequiredBadMediaDirs(self):
"""Testing check_updates_required with bad media directories"""
old_media_root = settings.MEDIA_ROOT
settings.MEDIA_ROOT = "/"
checks.reset_check_cache()
updates_required = checks.check_updates_required()
settings.MEDIA_ROOT = old_media_root
# Should complain about ext and upload directories.
self.assertEqual(len(updates_required), 2)
url, data = updates_required[0]
self.assertEqual(url, "admin/manual-updates/media-upload-dir.html")
url, data = updates_required[1]
self.assertEqual(url, "admin/manual-updates/ext-dir.html")
response = self.client.get("/")
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "admin/manual_updates_required.html")
| Fix the media manual updates unit test to account for the new ext dir page. | Fix the media manual updates unit test to account for the new ext dir page.
My previous change for the extension directory manual updates page broke
the unit tests. The existing test for the upload directory didn't take
into account that the extension directory would also now be needed. The
test was fixed and renamed to signify that it checks for media dirs now.
| Python | mit | brennie/reviewboard,beol/reviewboard,sgallagher/reviewboard,custode/reviewboard,1tush/reviewboard,atagar/ReviewBoard,beol/reviewboard,1tush/reviewboard,davidt/reviewboard,brennie/reviewboard,1tush/reviewboard,Khan/reviewboard,davidt/reviewboard,1tush/reviewboard,Khan/reviewboard,atagar/ReviewBoard,custode/reviewboard,atagar/ReviewBoard,davidt/reviewboard,1tush/reviewboard,bkochendorfer/reviewboard,KnowNo/reviewboard,sgallagher/reviewboard,KnowNo/reviewboard,atagar/ReviewBoard,bkochendorfer/reviewboard,Khan/reviewboard,reviewboard/reviewboard,atagar/ReviewBoard,reviewboard/reviewboard,custode/reviewboard,reviewboard/reviewboard,brennie/reviewboard,Khan/reviewboard,chipx86/reviewboard,chipx86/reviewboard,custode/reviewboard,KnowNo/reviewboard,1tush/reviewboard,sgallagher/reviewboard,1tush/reviewboard,davidt/reviewboard,brennie/reviewboard,Khan/reviewboard,Khan/reviewboard,bkochendorfer/reviewboard,beol/reviewboard,chipx86/reviewboard,Khan/reviewboard,chipx86/reviewboard,beol/reviewboard,atagar/ReviewBoard,sgallagher/reviewboard,1tush/reviewboard,atagar/ReviewBoard,Khan/reviewboard,bkochendorfer/reviewboard,Khan/reviewboard,1tush/reviewboard,KnowNo/reviewboard,atagar/ReviewBoard,reviewboard/reviewboard,atagar/ReviewBoard | ---
+++
@@ -20,8 +20,8 @@
self.assertEqual(len(updates_required), 0)
- def testManualUpdatesRequiredBadUpload(self):
- """Testing check_updates_required with a bad upload directory"""
+ def testManualUpdatesRequiredBadMediaDirs(self):
+ """Testing check_updates_required with bad media directories"""
old_media_root = settings.MEDIA_ROOT
settings.MEDIA_ROOT = "/"
checks.reset_check_cache()
@@ -29,11 +29,15 @@
updates_required = checks.check_updates_required()
settings.MEDIA_ROOT = old_media_root
- self.assertEqual(len(updates_required), 1)
+ # Should complain about ext and upload directories.
+ self.assertEqual(len(updates_required), 2)
url, data = updates_required[0]
self.assertEqual(url, "admin/manual-updates/media-upload-dir.html")
+ url, data = updates_required[1]
+ self.assertEqual(url, "admin/manual-updates/ext-dir.html")
+
response = self.client.get("/")
self.assertEqual(response.status_code, 200)
self.assertTemplateUsed(response, "admin/manual_updates_required.html") |
8940d3805f9377654046ff8b00807472b6925149 | lib/setup.py | lib/setup.py | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import numpy as np
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
cmdclass = {}
ext_modules = [
Extension(
"utils.cython_bbox",
["utils/bbox.pyx"],
extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
),
Extension(
"utils.cython_nms",
["utils/nms.pyx"],
extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
)
]
cmdclass.update({'build_ext': build_ext})
setup(
name='fast_rcnn',
cmdclass=cmdclass,
ext_modules=ext_modules,
include_dirs=[np.get_include()]
)
| # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
import numpy as np
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
cmdclass = {}
ext_modules = [
Extension(
"utils.cython_bbox",
["utils/bbox.pyx"],
#extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
),
Extension(
"utils.cython_nms",
["utils/nms.pyx"],
#extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
)
]
cmdclass.update({'build_ext': build_ext})
setup(
name='fast_rcnn',
cmdclass=cmdclass,
ext_modules=ext_modules,
include_dirs=[np.get_include()]
)
| Comment compilation options for Windows. | Comment compilation options for Windows. | Python | mit | only4hj/fast-rcnn,only4hj/fast-rcnn,only4hj/fast-rcnn | ---
+++
@@ -15,12 +15,12 @@
Extension(
"utils.cython_bbox",
["utils/bbox.pyx"],
- extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
+ #extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
),
Extension(
"utils.cython_nms",
["utils/nms.pyx"],
- extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
+ #extra_compile_args=["-Wno-cpp", "-Wno-unused-function"],
)
]
cmdclass.update({'build_ext': build_ext}) |
4753a6d19d00f9669e864f92730d56aaf31575da | 1-multiples-of-3-and-5.py | 1-multiples-of-3-and-5.py | from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
if __name__ == '__main__':
print(sum(threes_and_fives_gen(10000000)))
| from itertools import chain
def threes_and_fives_gen(num=1000):
for i in range(num):
if i % 3 == 0 or i % 5 == 0:
yield i
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
def solve(n):
return sum(
filter(lambda x: x%3==0 or x%5==0,
range(1, n)
)
)
if __name__ == '__main__':
print(sum(three_and_fives_gen(10000000)))
| Add functional solution to 1 | Add functional solution to 1
| Python | mit | dawran6/project-euler | ---
+++
@@ -8,5 +8,12 @@
def threes_and_fives_fun(n):
return set(chain(range(3, n+1, 3), range(5, n+1, 5)))
+def solve(n):
+ return sum(
+ filter(lambda x: x%3==0 or x%5==0,
+ range(1, n)
+ )
+ )
+
if __name__ == '__main__':
- print(sum(threes_and_fives_gen(10000000)))
+ print(sum(three_and_fives_gen(10000000))) |
80503c24854e976fa4bc86319f6c11dc3a5186b2 | test/test_property.py | test/test_property.py | import unittest
from odml import Property, Section, Document
class TestProperty(unittest.TestCase):
def setUp(self):
pass
def test_value(self):
p = Property("property", 100)
assert(p.value[0] == 100)
def test_name(self):
pass
def test_parent(self):
pass
def test_dtype(self):
pass
def test_path(self):
pass
if __name__ == "__main__":
print("TestProperty")
tp = TestProperty()
tp.test_value()
| import unittest
from odml import Property, Section, Document, DType
class TestProperty(unittest.TestCase):
def setUp(self):
pass
def test_value(self):
p = Property("property", 100)
assert(p.value[0] == 100)
def test_bool_conversion(self):
p = Property(name='received', value=[3, 0, 1, 0, 8])
assert(p.dtype == 'int')
p.dtype = DType.boolean
assert(p.dtype == 'boolean')
assert(p.value == [True, False, True, False, True])
q = Property(name='sent', value=['False', True, 'TRUE', '0'])
assert(q.dtype == 'string')
q.dtype = DType.boolean
assert(q.dtype == 'boolean')
assert(q.value == [False, True, True, False])
def test_name(self):
pass
def test_parent(self):
pass
def test_dtype(self):
pass
def test_path(self):
pass
if __name__ == "__main__":
print("TestProperty")
tp = TestProperty()
tp.test_value()
| Add tests for boolean conversion | Add tests for boolean conversion
| Python | bsd-3-clause | lzehl/python-odml | ---
+++
@@ -1,5 +1,5 @@
import unittest
-from odml import Property, Section, Document
+from odml import Property, Section, Document, DType
class TestProperty(unittest.TestCase):
@@ -10,6 +10,21 @@
def test_value(self):
p = Property("property", 100)
assert(p.value[0] == 100)
+
+ def test_bool_conversion(self):
+
+ p = Property(name='received', value=[3, 0, 1, 0, 8])
+ assert(p.dtype == 'int')
+ p.dtype = DType.boolean
+ assert(p.dtype == 'boolean')
+ assert(p.value == [True, False, True, False, True])
+
+ q = Property(name='sent', value=['False', True, 'TRUE', '0'])
+ assert(q.dtype == 'string')
+ q.dtype = DType.boolean
+ assert(q.dtype == 'boolean')
+ assert(q.value == [False, True, True, False])
+
def test_name(self):
pass |
f2005fadb9fb2e2bcad32286a9d993c291c1992e | lazyblacksmith/models/api/industry_index.py | lazyblacksmith/models/api/industry_index.py | # -*- encoding: utf-8 -*-
from . import db
from lazyblacksmith.models import Activity
class IndustryIndex(db.Model):
solarsystem_id = db.Column(
db.Integer, db.ForeignKey('solar_system.id'), primary_key=True
)
solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
cost_index = db.Column(
db.Numeric(
precision=20,
scale=19,
decimal_return_scale=19,
asdecimal=False
),
nullable=True)
@classmethod
def activity_string_to_activity(cls, activity_string):
if activity_string == 'invention':
return Activity.INVENTION
if activity_string == 'manufacturing':
return Activity.MANUFACTURING
if activity_string == 'researching_time_efficiency':
return Activity.RESEARCH_TIME_EFFICIENCY
if activity_string == 'researching_material_efficiency':
return Activity.RESEARCH_MATERIAL_EFFICIENCY
if activity_string == 'copying':
return Activity.COPYING
| # -*- encoding: utf-8 -*-
from . import db
from lazyblacksmith.models import Activity
class IndustryIndex(db.Model):
solarsystem_id = db.Column(
db.Integer, db.ForeignKey('solar_system.id'), primary_key=True
)
solarsystem = db.relationship('SolarSystem', backref=db.backref('indexes'))
activity = db.Column(db.Integer, primary_key=True, autoincrement=False)
cost_index = db.Column(
db.Numeric(
precision=20,
scale=19,
decimal_return_scale=19,
asdecimal=False
),
nullable=True)
@classmethod
def activity_string_to_activity(cls, activity_string):
if activity_string == 'invention':
return Activity.INVENTION
if activity_string == 'manufacturing':
return Activity.MANUFACTURING
if activity_string == 'researching_time_efficiency':
return Activity.RESEARCH_TIME_EFFICIENCY
if activity_string == 'researching_material_efficiency':
return Activity.RESEARCH_MATERIAL_EFFICIENCY
if activity_string == 'copying':
return Activity.COPYING
if activity_string == 'reaction':
return Activity.REACTIONS
| Fix celery task for industry indexes by adding missing field | Fix celery task for industry indexes by adding missing field
| Python | bsd-3-clause | Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith | ---
+++
@@ -32,3 +32,5 @@
return Activity.RESEARCH_MATERIAL_EFFICIENCY
if activity_string == 'copying':
return Activity.COPYING
+ if activity_string == 'reaction':
+ return Activity.REACTIONS |
874a6eff186d1c1ca6f90d69fd24fad11180c5a9 | thread_output_ctrl.py | thread_output_ctrl.py | import threading
import wx
from styled_text_ctrl import StyledTextCtrl
class ThreadOutputCtrl(StyledTextCtrl):
def __init__(self, parent, env, auto_scroll=False):
StyledTextCtrl.__init__(self, parent, env)
self.auto_scroll = auto_scroll
self.__lock = threading.Lock()
self.__queue = []
self.__timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.__OnTimer, self.__timer)
def __OnTimer(self, evt):
self.flush()
def flush(self):
with self.__lock:
queue, self.__queue = self.__queue, []
lines = "".join(queue)
if lines:
with self.ModifyReadOnly():
self.AppendText(lines)
self.EmptyUndoBuffer()
if self.auto_scroll:
self.ScrollToLine(self.GetLineCount() - 1)
def start(self, interval=100):
self.SetReadOnly(True)
self.__timer.Start(interval)
def stop(self):
self.__timer.Stop()
self.flush()
self.SetReadOnly(False)
def write(self, s):
with self.__lock:
self.__queue.append(s)
def ClearAll(self):
with self.ModifyReadOnly():
StyledTextCtrl.ClearAll(self)
| import threading
import wx
from styled_text_ctrl import StyledTextCtrl
class ThreadOutputCtrl(StyledTextCtrl):
def __init__(self, parent, env, auto_scroll=False):
StyledTextCtrl.__init__(self, parent, env)
self.auto_scroll = auto_scroll
self.__lock = threading.Lock()
self.__queue = []
self.__timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.__OnTimer, self.__timer)
def __OnTimer(self, evt):
self.flush()
def flush(self):
with self.__lock:
queue, self.__queue = self.__queue, []
lines = "".join(queue)
if lines:
with self.ModifyReadOnly():
self.AppendText(lines)
self.EmptyUndoBuffer()
if self.auto_scroll:
self.ScrollToLine(self.GetLineCount() - 1)
def start(self, interval=100):
self.SetReadOnly(True)
self.__timer.Start(interval)
def stop(self):
self.__timer.Stop()
self.flush()
self.SetReadOnly(False)
def write(self, s):
with self.__lock:
self.__queue.append(s)
def ClearAll(self):
with self.ModifyReadOnly():
StyledTextCtrl.ClearAll(self)
self.EmptyUndoBuffer()
| Clear undo buffer when terminal cleared. | Clear undo buffer when terminal cleared.
| Python | mit | shaurz/devo | ---
+++
@@ -44,3 +44,4 @@
def ClearAll(self):
with self.ModifyReadOnly():
StyledTextCtrl.ClearAll(self)
+ self.EmptyUndoBuffer() |
53080f89af51340b0b2c1854e0a4bf38346c14a8 | kill.py | kill.py | #!/usr/bin/env python2
return 1
| #!/usr/bin/env python2
from datetime import datetime, timedelta
from json import loads
import sys
if len(sys.argv) < 2:
raise Exception("Need an amount of keep-days of which to save your comments.")
days = int(sys.argv[1])
before_time = datetime.now() - timedelta(days=days)
f = open('data.json', 'r')
data = loads(f.read())
f.close()
for d in data:
date = datetime.fromtimestamp(d['date'])
if date < before_time:
delete_post(d['id'])
| Work out now() - 7 days | Work out now() - 7 days
| Python | bsd-2-clause | bparafina/Shreddit,bparafina/Shreddit,ijkilchenko/Shreddit,ijkilchenko/Shreddit | ---
+++
@@ -1,3 +1,21 @@
#!/usr/bin/env python2
-return 1
+from datetime import datetime, timedelta
+from json import loads
+import sys
+
+if len(sys.argv) < 2:
+ raise Exception("Need an amount of keep-days of which to save your comments.")
+
+days = int(sys.argv[1])
+
+before_time = datetime.now() - timedelta(days=days)
+
+f = open('data.json', 'r')
+data = loads(f.read())
+f.close()
+
+for d in data:
+ date = datetime.fromtimestamp(d['date'])
+ if date < before_time:
+ delete_post(d['id']) |
6047cab9c099c8a6740b7de1006f41e7d10f9f65 | jal_stats/stats/views.py | jal_stats/stats/views.py | # from django.shortcuts import render
from rest_framework import viewsets
from .models import Datapoint, Activity
from .serializers import ActivitySerializer, DatapointSerializer
# Create your views here.
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
def get_queryset(self):
return self.request.user.activity_set.all()
class DatapointViewSet(viewsets.ModelViewSet):
serializer_class = DatapointSerializer
def get_queryset(self):
return Datapoint.objects.all().filter(
# user=self.request.user,
activity=self.request.query_params['activity'])
| # from django.shortcuts import render
from rest_framework import viewsets
from .models import Datapoint, Activity
from .serializers import ActivitySerializer, DatapointSerializer
# Create your views here.
class ActivityViewSet(viewsets.ModelViewSet):
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
# def get_queryset(self):
# return self.request.user.activity_set.all()
class DatapointViewSet(viewsets.ModelViewSet):
serializer_class = DatapointSerializer
def get_queryset(self):
return Datapoint.objects.all().filter(
# user=self.request.user,
activity=self.request.query_params['activity'])
| Remove more remnants of users | Remove more remnants of users
| Python | mit | jal-stats/django | ---
+++
@@ -10,8 +10,8 @@
queryset = Activity.objects.all()
serializer_class = ActivitySerializer
- def get_queryset(self):
- return self.request.user.activity_set.all()
+ # def get_queryset(self):
+ # return self.request.user.activity_set.all()
class DatapointViewSet(viewsets.ModelViewSet): |
9d3889a67ff6de69cd539b688cf3c2b9db17f0cb | jarn/mkrelease/python.py | jarn/mkrelease/python.py | from process import Process
from exit import err_exit
class Python(object):
"""A Python interpreter path that can test itself."""
def __init__(self, defaults, process=None):
self.process = process or Process()
self.python = defaults.python
def __str__(self):
return self.python
def is_valid_python(self, python=None):
version = self.get_python_version(python)
if version and version >= '2.6':
return True
return False
def check_valid_python(self, python=None):
version = self.get_python_version(python)
if not version:
err_exit('Bad interpreter')
if version < '2.6':
err_exit('Python >= 2.6 required')
def get_python_version(self, python=None):
if python is None:
python = self.python
version = self.process.pipe(
'"%(python)s" -c"import sys; print sys.version[:3]"' % locals())
return version
| import sys
from process import Process
from exit import err_exit
class Python(object):
"""A Python interpreter path that can test itself."""
def __init__(self, defaults, process=None):
self.process = process or Process()
self.python = defaults.python
def __str__(self):
return self.python
def is_valid_python(self, python=None):
version = self.get_python_version(python)
if version and version >= '2.6':
return True
return False
def check_valid_python(self, python=None):
version = self.get_python_version(python)
if not version:
err_exit('Bad interpreter')
if version < '2.6':
err_exit('Python >= 2.6 required')
def get_python_version(self, python=None):
if python is None:
python = self.python
if python == sys.executable:
return sys.version[:3]
version = self.process.pipe(
'"%(python)s" -c"import sys; print sys.version[:3]"' % locals())
return version
| Optimize the common case to reduce startup time. | Optimize the common case to reduce startup time.
| Python | bsd-2-clause | Jarn/jarn.mkrelease | ---
+++
@@ -1,3 +1,5 @@
+import sys
+
from process import Process
from exit import err_exit
@@ -28,6 +30,8 @@
def get_python_version(self, python=None):
if python is None:
python = self.python
+ if python == sys.executable:
+ return sys.version[:3]
version = self.process.pipe(
'"%(python)s" -c"import sys; print sys.version[:3]"' % locals())
return version |
72f8249cb26ad38e77ac74a7d149839fb3a1cf95 | utils/swift_build_support/swift_build_support/diagnostics.py | utils/swift_build_support/swift_build_support/diagnostics.py | # swift_build_support/diagnostics.py - Diagnostic Utilities -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from __future__ import print_function
import sys
def note(message):
"""
note(message)
Print a diagnostic notification to the standard output.
"""
print(sys.argv[0] + ": note: " + message)
sys.stdout.flush()
def fatal(message):
"""
fatal(message)
Raise a fatal error.
"""
raise SystemExit(sys.argv[0] + ": fatal error: " + message)
| # swift_build_support/diagnostics.py - Diagnostic Utilities -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
from __future__ import print_function
import sys
def note(message):
"""
note(message)
Print a diagnostic notification to the standard error stream.
"""
print(sys.argv[0] + ": note: " + message, file=sys.stderr)
sys.stderr.flush()
def fatal(message):
"""
fatal(message)
Raise a fatal error.
"""
raise SystemExit(sys.argv[0] + ": fatal error: " + message)
| Print build-script notes to stderr | Print build-script notes to stderr
This makes it easier to ignore them programmatically.
| Python | apache-2.0 | xwu/swift,aschwaighofer/swift,karwa/swift,sschiau/swift,shajrawi/swift,airspeedswift/swift,hooman/swift,devincoughlin/swift,stephentyrone/swift,jmgc/swift,rudkx/swift,rudkx/swift,xwu/swift,allevato/swift,xedin/swift,allevato/swift,apple/swift,benlangmuir/swift,atrick/swift,parkera/swift,tkremenek/swift,hooman/swift,nathawes/swift,devincoughlin/swift,CodaFi/swift,karwa/swift,tkremenek/swift,lorentey/swift,gribozavr/swift,lorentey/swift,xwu/swift,tkremenek/swift,shahmishal/swift,atrick/swift,xedin/swift,shahmishal/swift,JGiola/swift,gregomni/swift,apple/swift,apple/swift,CodaFi/swift,devincoughlin/swift,nathawes/swift,ahoppen/swift,shajrawi/swift,rudkx/swift,sschiau/swift,nathawes/swift,jmgc/swift,xedin/swift,airspeedswift/swift,lorentey/swift,apple/swift,JGiola/swift,harlanhaskins/swift,CodaFi/swift,xedin/swift,sschiau/swift,nathawes/swift,roambotics/swift,roambotics/swift,gregomni/swift,gribozavr/swift,shajrawi/swift,gribozavr/swift,lorentey/swift,allevato/swift,aschwaighofer/swift,xwu/swift,shahmishal/swift,jmgc/swift,JGiola/swift,JGiola/swift,shajrawi/swift,jckarter/swift,gregomni/swift,aschwaighofer/swift,devincoughlin/swift,stephentyrone/swift,shajrawi/swift,ahoppen/swift,harlanhaskins/swift,xwu/swift,gregomni/swift,stephentyrone/swift,xedin/swift,jckarter/swift,ahoppen/swift,devincoughlin/swift,gribozavr/swift,hooman/swift,glessard/swift,benlangmuir/swift,sschiau/swift,sschiau/swift,shajrawi/swift,apple/swift,stephentyrone/swift,devincoughlin/swift,jmgc/swift,stephentyrone/swift,parkera/swift,sschiau/swift,jmgc/swift,karwa/swift,harlanhaskins/swift,xwu/swift,allevato/swift,aschwaighofer/swift,benlangmuir/swift,benlangmuir/swift,stephentyrone/swift,CodaFi/swift,devincoughlin/swift,atrick/swift,CodaFi/swift,JGiola/swift,rudkx/swift,harlanhaskins/swift,ahoppen/swift,roambotics/swift,karwa/swift,harlanhaskins/swift,benlangmuir/swift,shahmishal/swift,tkremenek/swift,allevato/swift,gregomni/swift,hooman/swift,xedin/swift,shahmishal/swift,aschw
aighofer/swift,devincoughlin/swift,gribozavr/swift,jckarter/swift,airspeedswift/swift,tkremenek/swift,ahoppen/swift,lorentey/swift,apple/swift,hooman/swift,xwu/swift,allevato/swift,harlanhaskins/swift,xedin/swift,nathawes/swift,karwa/swift,nathawes/swift,jmgc/swift,ahoppen/swift,nathawes/swift,parkera/swift,shahmishal/swift,parkera/swift,glessard/swift,shahmishal/swift,parkera/swift,sschiau/swift,shajrawi/swift,lorentey/swift,benlangmuir/swift,jckarter/swift,shahmishal/swift,xedin/swift,stephentyrone/swift,harlanhaskins/swift,glessard/swift,karwa/swift,lorentey/swift,parkera/swift,roambotics/swift,CodaFi/swift,rudkx/swift,tkremenek/swift,CodaFi/swift,gribozavr/swift,aschwaighofer/swift,airspeedswift/swift,hooman/swift,lorentey/swift,allevato/swift,gribozavr/swift,atrick/swift,karwa/swift,glessard/swift,roambotics/swift,glessard/swift,sschiau/swift,jckarter/swift,atrick/swift,rudkx/swift,jckarter/swift,jmgc/swift,hooman/swift,aschwaighofer/swift,atrick/swift,gregomni/swift,tkremenek/swift,parkera/swift,glessard/swift,airspeedswift/swift,airspeedswift/swift,gribozavr/swift,shajrawi/swift,parkera/swift,JGiola/swift,jckarter/swift,airspeedswift/swift,roambotics/swift,karwa/swift | ---
+++
@@ -19,10 +19,10 @@
"""
note(message)
- Print a diagnostic notification to the standard output.
+ Print a diagnostic notification to the standard error stream.
"""
- print(sys.argv[0] + ": note: " + message)
- sys.stdout.flush()
+ print(sys.argv[0] + ": note: " + message, file=sys.stderr)
+ sys.stderr.flush()
def fatal(message): |
bceaa1ef82004076640381619ebc399513e83128 | genes/intellij/main.py | genes/intellij/main.py | from genes.brew import commands as brew
from genes.curl.commands import download
from genes.debian.traits import is_debian
from genes.directory import DirectoryBuilder
from genes.mac.traits import is_osx
from genes.tar.commands import untar
from genes.ubuntu.traits import is_ubuntu
def main():
if is_debian() or is_ubuntu():
download(
"https://download.jetbrains.com/idea/ideaIU-15.0.tar.gz",
"/tmp/ideas.tar.gz"
)
DirectoryBuilder('/opt/intellij-ideas').\
set_mode('755').\
set_group('root').\
set_user('root').\
build()
untar('/tmp/ideas.tar.gz', '/opt/intellij-ideas')
if is_osx():
brew.update()
brew.cask_install('intellij-idea')
else:
pass
| from genes.brew import commands as brew
from genes.curl.commands import download
from genes.debian.traits import is_debian
from genes import directory
from genes.directory import DirectoryConfig
from genes.mac.traits import is_osx
from genes.tar.commands import untar
from genes.ubuntu.traits import is_ubuntu
def main():
if is_debian() or is_ubuntu():
download(
"https://download.jetbrains.com/idea/ideaIU-15.0.tar.gz",
"/tmp/ideas.tar.gz"
)
def config_directory()
return DirectoryConfig(
path='/opt/intellij-ideas',
mode='755',
group='root',
user='root',
)
# FIXME: Need to find a way to handle errors here
direcotry.main(config_directory)
untar('/tmp/ideas.tar.gz', '/opt/intellij-ideas')
if is_osx():
brew.update()
brew.cask_install('intellij-idea')
else:
pass
| Change to directory config method | Change to directory config method | Python | mit | hatchery/Genepool2,hatchery/genepool | ---
+++
@@ -1,7 +1,8 @@
from genes.brew import commands as brew
from genes.curl.commands import download
from genes.debian.traits import is_debian
-from genes.directory import DirectoryBuilder
+from genes import directory
+from genes.directory import DirectoryConfig
from genes.mac.traits import is_osx
from genes.tar.commands import untar
from genes.ubuntu.traits import is_ubuntu
@@ -13,11 +14,15 @@
"https://download.jetbrains.com/idea/ideaIU-15.0.tar.gz",
"/tmp/ideas.tar.gz"
)
- DirectoryBuilder('/opt/intellij-ideas').\
- set_mode('755').\
- set_group('root').\
- set_user('root').\
- build()
+ def config_directory()
+ return DirectoryConfig(
+ path='/opt/intellij-ideas',
+ mode='755',
+ group='root',
+ user='root',
+ )
+ # FIXME: Need to find a way to handle errors here
+ direcotry.main(config_directory)
untar('/tmp/ideas.tar.gz', '/opt/intellij-ideas')
if is_osx():
brew.update() |
029bd1c15a489ab8833ffaff5130995bf4d31c5a | tests/test_auth.py | tests/test_auth.py | # -*- coding: utf-8 *-*
import logging
import unittest
from mongolog import MongoHandler
try:
from pymongo import MongoClient as Connection
except ImportError:
from pymongo import Connection
class TestAuth(unittest.TestCase):
def setUp(self):
""" Create an empty database that could be used for logging """
self.db_name = '_mongolog_auth'
self.collection_name = 'log'
self.user_name = 'MyUsername'
self.password = 'MySeCrEtPaSsWoRd'
self.conn = Connection()
self.db = self.conn[self.db_name]
self.collection = self.db[self.collection_name]
self.conn.drop_database(self.db_name)
self.db.command(
'createUser',
self.user_name,
pwd=self.password
)
def tearDown(self):
""" Drop used database """
self.conn.drop_database(self.db_name)
def testAuthentication(self):
""" Logging example with authentication """
log = logging.getLogger('authentication')
log.addHandler(MongoHandler(self.collection_name, self.db_name,
username=self.user_name,
password=self.password))
log.error('test')
message = self.collection.find_one({'levelname': 'ERROR',
'msg': 'test'})
self.assertEqual(message['msg'], 'test')
| # -*- coding: utf-8 *-*
import logging
import unittest
from mongolog import MongoHandler
try:
from pymongo import MongoClient as Connection
except ImportError:
from pymongo import Connection
class TestAuth(unittest.TestCase):
def setUp(self):
""" Create an empty database that could be used for logging """
self.db_name = '_mongolog_auth'
self.collection_name = 'log'
self.user_name = 'MyUsername'
self.password = 'MySeCrEtPaSsWoRd'
self.conn = Connection()
self.db = self.conn[self.db_name]
self.collection = self.db[self.collection_name]
self.conn.drop_database(self.db_name)
self.db.command(
'createUser',
self.user_name,
pwd=self.password,
roles=["readWrite"]
)
def tearDown(self):
""" Drop used database """
self.conn.drop_database(self.db_name)
def testAuthentication(self):
""" Logging example with authentication """
log = logging.getLogger('authentication')
log.addHandler(MongoHandler(self.collection_name, self.db_name,
username=self.user_name,
password=self.password))
log.error('test')
message = self.collection.find_one({'levelname': 'ERROR',
'msg': 'test'})
self.assertEqual(message['msg'], 'test')
| Add roles argument for createUser command. | Add roles argument for createUser command.
| Python | bsd-2-clause | puentesarrin/mongodb-log,puentesarrin/mongodb-log | ---
+++
@@ -27,7 +27,8 @@
self.db.command(
'createUser',
self.user_name,
- pwd=self.password
+ pwd=self.password,
+ roles=["readWrite"]
)
def tearDown(self): |
ff12421cc6c3067bac11ece75cf4a16d11859ed0 | tests/test_envs.py | tests/test_envs.py | import gym
import pytest
# Import for side-effect of registering environment
import imitation.examples.airl_envs # noqa: F401
import imitation.examples.model_envs # noqa: F401
ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all()
if env_spec.id.startswith('imitation/')]
@pytest.mark.parametrize("env_name", ENV_NAMES)
def test_envs(env_name): # pragma: no cover
"""Check that our custom environments don't crash on `step`, and `reset`."""
try:
env = gym.make(env_name)
except gym.error.DependencyNotInstalled as e:
if e.args[0].find('mujoco_py') != -1:
pytest.skip("Requires `mujoco_py`, which isn't installed.")
else:
raise
env.reset()
obs_space = env.observation_space
for _ in range(4):
act = env.action_space.sample()
obs, rew, done, info = env.step(act)
assert obs in obs_space
| import gym
import pytest
# Import for side-effect of registering environment
import imitation.examples.airl_envs # noqa: F401
import imitation.examples.model_envs # noqa: F401
ENV_NAMES = [env_spec.id for env_spec in gym.envs.registration.registry.all()
if env_spec.id.startswith('imitation/')]
@pytest.mark.parametrize("env_name", ENV_NAMES)
def test_envs(env_name):
"""Check that our custom environments don't crash on `step`, and `reset`."""
try:
env = gym.make(env_name)
except gym.error.DependencyNotInstalled as e: # pragma: nocover
if e.args[0].find('mujoco_py') != -1:
pytest.skip("Requires `mujoco_py`, which isn't installed.")
else:
raise
env.reset()
obs_space = env.observation_space
for _ in range(4):
act = env.action_space.sample()
obs, rew, done, info = env.step(act)
assert obs in obs_space
| Move the pragma: nocover to except block | Move the pragma: nocover to except block
| Python | mit | HumanCompatibleAI/imitation,humancompatibleai/imitation,humancompatibleai/imitation,HumanCompatibleAI/imitation | ---
+++
@@ -10,11 +10,11 @@
@pytest.mark.parametrize("env_name", ENV_NAMES)
-def test_envs(env_name): # pragma: no cover
+def test_envs(env_name):
"""Check that our custom environments don't crash on `step`, and `reset`."""
try:
env = gym.make(env_name)
- except gym.error.DependencyNotInstalled as e:
+ except gym.error.DependencyNotInstalled as e: # pragma: nocover
if e.args[0].find('mujoco_py') != -1:
pytest.skip("Requires `mujoco_py`, which isn't installed.")
else: |
011ad6090e183ce359c0a74bbd2f2530e1d5178c | tests/test_repr.py | tests/test_repr.py | """ Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT:
pass
else:
assert False, 'TIMEOUT exception expected. No exception aised.'
| """ Test __str__ methods. """
import pexpect
from . import PexpectTestCase
class TestCaseMisc(PexpectTestCase.PexpectTestCase):
def test_str_spawnu(self):
""" Exercise spawnu.__str__() """
# given,
p = pexpect.spawnu('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_spawn(self):
""" Exercise spawn.__str__() """
# given,
p = pexpect.spawn('cat')
# exercise,
value = str(p)
# verify
assert isinstance(value, str)
def test_str_before_spawn(self):
""" Exercise derived spawn.__str__() """
# given,
child = pexpect.spawn(None, None)
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
except pexpect.TIMEOUT as e:
str(e) # Smoketest
else:
assert False, 'TIMEOUT exception expected. No exception raised.'
| Check error repr can be str-ed | Check error repr can be str-ed
| Python | isc | nodish/pexpect,nodish/pexpect,nodish/pexpect | ---
+++
@@ -31,7 +31,7 @@
child.read_nonblocking = lambda size, timeout: b''
try:
child.expect('alpha', timeout=0.1)
- except pexpect.TIMEOUT:
- pass
+ except pexpect.TIMEOUT as e:
+ str(e) # Smoketest
else:
- assert False, 'TIMEOUT exception expected. No exception aised.'
+ assert False, 'TIMEOUT exception expected. No exception raised.' |
2234cbdc78e81329c4110f4eb4e69f429d9b6fb5 | csvkit/convert/dbase.py | csvkit/convert/dbase.py | #!/usr/bin/env python
from cStringIO import StringIO
import dbf
from csvkit import table
def dbf2csv(f, **kwargs):
"""
Convert a dBASE .dbf file to csv.
"""
db = dbf.Table(f.name)
headers = db.field_names
column_ids = range(len(headers))
data_columns = [[] for c in headers]
for row in db:
for i, d in enumerate(row):
try:
data_columns[i].append(unicode(row[column_ids[i]]).strip())
except IndexError:
# Non-rectangular data is truncated
break
columns = []
for i, c in enumerate(data_columns):
columns.append(table.Column(column_ids[i], headers[i], c))
tab = table.Table(columns=columns)
o = StringIO()
output = tab.to_csv(o)
output = o.getvalue()
o.close()
return output
| #!/usr/bin/env python
from cStringIO import StringIO
import dbf
from csvkit import table
def dbf2csv(f, **kwargs):
"""
Convert a dBASE .dbf file to csv.
"""
with dbf.Table(f.name) as db:
headers = db.field_names
column_ids = range(len(headers))
data_columns = [[] for c in headers]
for row in db:
for i, d in enumerate(row):
try:
data_columns[i].append(unicode(row[column_ids[i]]).strip())
except IndexError:
# Non-rectangular data is truncated
break
columns = []
for i, c in enumerate(data_columns):
columns.append(table.Column(column_ids[i], headers[i], c))
tab = table.Table(columns=columns)
o = StringIO()
output = tab.to_csv(o)
output = o.getvalue()
o.close()
return output
| Fix for bug in latest dbf module. Pypy passes now. | Fix for bug in latest dbf module. Pypy passes now.
| Python | mit | bradparks/csvkit__query_join_filter_CSV_cli,matterker/csvkit,unpingco/csvkit,kyeoh/csvkit,Jobava/csvkit,snuggles08/csvkit,dannguyen/csvkit,cypreess/csvkit,jpalvarezf/csvkit,archaeogeek/csvkit,gepuro/csvkit,haginara/csvkit,barentsen/csvkit,bmispelon/csvkit,wjr1985/csvkit,KarrieK/csvkit,onyxfish/csvkit,wireservice/csvkit,moradology/csvkit,aequitas/csvkit,doganmeh/csvkit,themiurgo/csvkit,Tabea-K/csvkit,elcritch/csvkit,tlevine/csvkit,metasoarous/csvkit,reubano/csvkit,nriyer/csvkit,arowla/csvkit | ---
+++
@@ -9,35 +9,33 @@
def dbf2csv(f, **kwargs):
"""
Convert a dBASE .dbf file to csv.
+ """
+ with dbf.Table(f.name) as db:
+ headers = db.field_names
- """
- db = dbf.Table(f.name)
+ column_ids = range(len(headers))
- headers = db.field_names
+ data_columns = [[] for c in headers]
- column_ids = range(len(headers))
+ for row in db:
+ for i, d in enumerate(row):
+ try:
+ data_columns[i].append(unicode(row[column_ids[i]]).strip())
+ except IndexError:
+ # Non-rectangular data is truncated
+ break
- data_columns = [[] for c in headers]
+ columns = []
- for row in db:
- for i, d in enumerate(row):
- try:
- data_columns[i].append(unicode(row[column_ids[i]]).strip())
- except IndexError:
- # Non-rectangular data is truncated
- break
+ for i, c in enumerate(data_columns):
+ columns.append(table.Column(column_ids[i], headers[i], c))
- columns = []
+ tab = table.Table(columns=columns)
- for i, c in enumerate(data_columns):
- columns.append(table.Column(column_ids[i], headers[i], c))
+ o = StringIO()
+ output = tab.to_csv(o)
+ output = o.getvalue()
+ o.close()
- tab = table.Table(columns=columns)
+ return output
- o = StringIO()
- output = tab.to_csv(o)
- output = o.getvalue()
- o.close()
-
- return output
- |
61072f0054abcb50caa813fc35eff194be64727b | src/icecast_parser.py | src/icecast_parser.py | from bs4 import BeautifulSoup
from requests.auth import HTTPBasicAuth
import requests
import json
def parse_content():
rs = requests.get('http://soundspectra.com/admin/', auth=HTTPBasicAuth('admin', 'h@ckm3'))
html_data = rs.text
soup = BeautifulSoup(html_data)
details = {'stream_details' : []}
details_list = []
alt = 1
key = None
for td in soup.find_all('td'):
if alt:
if td.get('width') and td.get('width') == '130':
key = td.text
alt = not alt
elif not alt:
if td.get('class') and td.get('class')[0] == 'streamdata':
alt = not alt
value = td.text
d = {key.encode("utf-8") : value.encode("utf-8")}
details_list.append(d)
details['stream_details'] = details_list
print details
if __name__ == '__main__':
parse_content() | from bs4 import BeautifulSoup
from requests.auth import HTTPBasicAuth
import requests
import json
def parse_content():
rs = requests.get('http://soundspectra.com/admin/', auth=HTTPBasicAuth('admin', 'h@ckm3'))
html_data = rs.text
soup = BeautifulSoup(html_data)
details = {'stream_details' : []}
details_list = []
alt = 1
key = None
for td in soup.find_all('td'):
if alt:
if td.get('width') and td.get('width') == '130':
key = td.text
alt = not alt
elif not alt:
if td.get('class') and td.get('class')[0] == 'streamdata':
alt = not alt
value = td.text
d = {key.encode("utf-8") : value.encode("utf-8")}
details_list.append(d)
details['stream_details'] = details_list
return details
if __name__ == '__main__':
details = parse_content()
print details
| Return the json from the parser method | Return the json from the parser method
| Python | apache-2.0 | ekholabs/icecast_scraper | ---
+++
@@ -29,7 +29,8 @@
details_list.append(d)
details['stream_details'] = details_list
- print details
+ return details
if __name__ == '__main__':
- parse_content()
+ details = parse_content()
+ print details |
4eff72987144a31ca1dee922a755adc8a5efefb8 | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter4, a code checking framework for Sublime Text 3
#
# Written by Markus Liljedahl
# Copyright (c) 2017 Markus Liljedahl
#
# License: MIT
#
"""This module exports the AnsibleLint plugin class."""
from SublimeLinter.lint import Linter, util
class AnsibleLint(Linter):
"""Provides an interface to ansible-lint."""
# ansbile-lint verison requirements check
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.0.1'
# linter settings
cmd = ('ansible-lint', '${args}', '@')
regex = r'^.+:(?P<line>\d+): \[.(?P<error>.+)\] (?P<message>.+)'
# -p generate non-multi-line, pep8 compatible output
multiline = False
# ansible-lint does not support column number
word_re = False
line_col_base = (1, 1)
tempfile_suffix = 'yml'
error_stream = util.STREAM_STDOUT
defaults = {
'selector': 'source.ansible',
'args': '--nocolor -p',
'--exclude= +': ['.galaxy'],
'-c': '',
'-r': '',
'-R': '',
'-t': '',
'-x': '',
}
inline_overrides = ['c', 'exclude', 'r', 'R', 't', 'x']
| #
# linter.py
# Linter for SublimeLinter4, a code checking framework for Sublime Text 3
#
# Written by Markus Liljedahl
# Copyright (c) 2017 Markus Liljedahl
#
# License: MIT
#
"""This module exports the AnsibleLint plugin class."""
from SublimeLinter.lint import Linter, util
class AnsibleLint(Linter):
"""Provides an interface to ansible-lint."""
# ansbile-lint verison requirements check
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 3.0.1'
# linter settings
cmd = ('ansible-lint', '${args}', '${file}')
regex = r'^.+:(?P<line>\d+): \[.(?P<error>.+)\] (?P<message>.+)'
# -p generate non-multi-line, pep8 compatible output
multiline = False
# ansible-lint does not support column number
word_re = False
line_col_base = (1, 1)
tempfile_suffix = 'yml'
error_stream = util.STREAM_STDOUT
defaults = {
'selector': 'source.ansible',
'args': '--nocolor -p',
'--exclude= +': ['.galaxy'],
'-c': '',
'-r': '',
'-R': '',
'-t': '',
'-x': '',
}
inline_overrides = ['c', 'exclude', 'r', 'R', 't', 'x']
| Replace @ with , otherwise there is blank output from ansible-lint | Replace @ with , otherwise there is blank output from ansible-lint
| Python | mit | mliljedahl/SublimeLinter-contrib-ansible-lint | ---
+++
@@ -22,7 +22,7 @@
version_requirement = '>= 3.0.1'
# linter settings
- cmd = ('ansible-lint', '${args}', '@')
+ cmd = ('ansible-lint', '${args}', '${file}')
regex = r'^.+:(?P<line>\d+): \[.(?P<error>.+)\] (?P<message>.+)'
# -p generate non-multi-line, pep8 compatible output
multiline = False |
b3e892f476c743a6ed2e2518fd1c9c2bec4d56ae | invocations/testing.py | invocations/testing.py | from invoke import ctask as task
@task(help={
'module': "Just runs tests/STRING.py.",
'runner': "Use STRING to run tests instead of 'spec'.",
'opts': "Extra flags for the test runner",
'pty': "Whether to run tests under a pseudo-tty",
})
def test(c, module=None, runner=None, opts=None, pty=True):
"""
Run a Spec or Nose-powered internal test suite.
"""
runner = runner or 'spec'
# Allow selecting specific submodule
specific_module = " --tests=tests/%s.py" % module
args = (specific_module if module else "")
if opts:
args += " " + opts
# Use pty by default so the spec/nose/Python process buffers "correctly"
c.run(runner + args, pty=pty)
| from invoke import ctask as task
@task(help={
'module': "Just runs tests/STRING.py.",
'runner': "Use STRING to run tests instead of 'spec'.",
'opts': "Extra flags for the test runner",
'pty': "Whether to run tests under a pseudo-tty",
})
def test(c, module=None, runner=None, opts=None, pty=True):
"""
Run a Spec or Nose-powered internal test suite.
"""
runner = runner or 'spec'
# Allow selecting specific submodule
specific_module = " --tests=tests/%s.py" % module
args = (specific_module if module else "")
if opts:
args += " " + opts
# Always enable timing info by default. OPINIONATED
args += " --with-timing"
# Use pty by default so the spec/nose/Python process buffers "correctly"
c.run(runner + args, pty=pty)
| Use spec 1.1 --with-timing option | Use spec 1.1 --with-timing option
| Python | bsd-2-clause | mrjmad/invocations,singingwolfboy/invocations,pyinvoke/invocations | ---
+++
@@ -17,5 +17,7 @@
args = (specific_module if module else "")
if opts:
args += " " + opts
+ # Always enable timing info by default. OPINIONATED
+ args += " --with-timing"
# Use pty by default so the spec/nose/Python process buffers "correctly"
c.run(runner + args, pty=pty) |
f495ecb5f9131c2c13c41e78cc3fc2e182bdc8fc | hotline/db/db_redis.py | hotline/db/db_redis.py | import os
import redis
from urllib.parse import urlparse
redis_url = os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379')
redis_url_parse = urlparse(redis_url)
redis_client = redis.StrictRedis(host=redis_url_parse.hostname, port=redis_url_parse.port)
| from db.db_abstract import AbstractClient
from redis import StrictRedis
from urllib.parse import urlparse
class RedisClient(AbstractClient):
def __init__(self, url):
self.url = url
self.client = None
def connect(self):
redis_url = urlparse(self.url)
self.client = StrictRedis(host=url.hostname, port=url.port, password=url.password)
def get(self, **kwargs):
pass
def set(self, **kwargs):
pass
def update(self, **kwargs):
pass
def delete(self, **kwargs):
pass
| Update to inherit from abstract class | Update to inherit from abstract class
| Python | mit | wearhacks/hackathon_hotline | ---
+++
@@ -1,10 +1,25 @@
-import os
-import redis
-
+from db.db_abstract import AbstractClient
+from redis import StrictRedis
from urllib.parse import urlparse
-redis_url = os.environ.get('REDISCLOUD_URL', 'redis://localhost:6379')
-redis_url_parse = urlparse(redis_url)
-redis_client = redis.StrictRedis(host=redis_url_parse.hostname, port=redis_url_parse.port)
+class RedisClient(AbstractClient):
+ def __init__(self, url):
+ self.url = url
+ self.client = None
+ def connect(self):
+ redis_url = urlparse(self.url)
+ self.client = StrictRedis(host=url.hostname, port=url.port, password=url.password)
+
+ def get(self, **kwargs):
+ pass
+
+ def set(self, **kwargs):
+ pass
+
+ def update(self, **kwargs):
+ pass
+
+ def delete(self, **kwargs):
+ pass |
71b72c3f09af86da83a027502d28c9649db1cf86 | kai/controllers/tracs.py | kai/controllers/tracs.py | import logging
from pylons import config, tmpl_context as c
from pylons.controllers.util import abort
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
except:
pass
from kai.lib.base import BaseController
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
return trac_app(environ, start_response)
| import logging
from pylons import config, tmpl_context as c
from pylons.controllers.util import abort
# Monkey patch the lazywriter, since mercurial needs that on the stdout
import paste.script.serve as serve
serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try:
import os
os.environ['TRAC_ENV_PARENT_DIR'] = '/usr/local/www'
os.environ['PYTHON_EGG_CACHE'] = os.path.join(config['pylons.paths']['root'], 'egg_cache')
import trac.web.main
trac_app = trac.web.main.dispatch_request
except:
pass
from kai.lib.base import BaseController
log = logging.getLogger(__name__)
class TracsController(BaseController):
def run_app(self, environ, start_response):
if not trac_app:
abort(404)
if c.user:
environ['REMOTE_USER'] = c.user.displayname
return trac_app(environ, start_response)
| Add monkey patch for mercurial trac | Add monkey patch for mercurial trac
| Python | bsd-3-clause | Pylons/kai,Pylons/kai | ---
+++
@@ -2,6 +2,10 @@
from pylons import config, tmpl_context as c
from pylons.controllers.util import abort
+
+# Monkey patch the lazywriter, since mercurial needs that on the stdout
+import paste.script.serve as serve
+serve.LazyWriter.closed = False
# Conditionally import the trac components in case things trac isn't installed
try: |
48213f561c802e5279770cc833a9a5a68575bf72 | inventory.py | inventory.py | from flask import Flask, render_template, url_for, redirect
from flask import session, escape, request
from peewee import *
#from datetime import date
app = Flask(__name__)
# http://docs.peewee-orm.com/en/latest/peewee/quickstart.html
database = SqliteDatabase('developmentData.db')
#class Device(Model):
# idNumber = IntField()
# serialNumber = CharField()
# typeCategory = CharField()
# description = TextField()
# issues = TextField()
# photo = CharField()
# quality = CharField()
@app.route('/')
def index():
# http://flask.pocoo.org/snippets/15/
if 'username' in session:
return render_template('inventory.html', inventoryData="", deviceLogData="")
return redirect(url_for('login'));
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'POST':
session['username'] = request.form['username']
return redirect(url_for('index'))
return render_template('login.html')
if __name__ == '__main__':
db.connect()
app.run()
| from flask import Flask, render_template, url_for, redirect
from flask import session, escape, request
from peewee import *
#from datetime import date
app = Flask(__name__)
# http://docs.peewee-orm.com/en/latest/peewee/quickstart.html
database = SqliteDatabase('developmentData.db')
#class Device(Model):
# idNumber = IntField()
# serialNumber = CharField()
# typeCategory = CharField()
# description = TextField()
# issues = TextField()
# photo = CharField()
# quality = CharField()
@app.route('/')
def index():
# http://flask.pocoo.org/snippets/15/
if 'username' in session:
return render_template('inventory.html', inventoryData="", deviceLogData="")
return redirect(url_for('login'));
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'POST':
#session['username'] = request.form['username']
return "success"#redirect(url_for('index'))
return render_template('login.html')
if __name__ == '__main__':
db.connect()
app.run()
| Add debug code to test login post | Add debug code to test login post
| Python | mit | lcdi/Inventory,lcdi/Inventory,lcdi/Inventory,lcdi/Inventory | ---
+++
@@ -26,8 +26,8 @@
@app.route('/login', methods=['GET', 'POST'])
def login():
if request.method == 'POST':
- session['username'] = request.form['username']
- return redirect(url_for('index'))
+ #session['username'] = request.form['username']
+ return "success"#redirect(url_for('index'))
return render_template('login.html')
|
82d5276b6c9164e4b8bffe74dc3068ed3e6a967e | main.py | main.py | from jsonrpc import JSONRPCResponseManager
from funcs import d
def app(environ, start_response):
if 'POST'!=environ.get('REQUEST_METHOD'):
if 'OPTIONS'==environ.get('REQUEST_METHOD'):
start_response('200 OK',[('Access-Control-Allow-Origin','*'), ('Access-Control-Allow-Methods', 'POST')])
yield b''
else:
start_response('405 Method Not Allowed',[('Content-Type','text/plain')])
yield b'405 Method Not Allowed'
else:
j=JSONRPCResponseManager.handle(environ['wsgi.input'].read().decode(), d)
if j:
start_response('200 OK',[('Content-Type','application/json'), ('Access-Control-Allow-Origin','*')])
yield j.json.encode()
else:
start_response('204 No Content',[('Access-Control-Allow-Origin','*')])
yield b'' | from jsonrpc import JSONRPCResponseManager
from funcs import d
def app(environ, start_response):
if 'POST'!=environ.get('REQUEST_METHOD'):
if 'OPTIONS'==environ.get('REQUEST_METHOD'):
start_response('200 OK',[
('Access-Control-Allow-Origin','*'),
('Access-Control-Allow-Methods', 'POST'),
('Access-Control-Allow-Headers', 'Content-Type')])
yield b''
else:
start_response('405 Method Not Allowed',[('Content-Type','text/plain')])
yield b'405 Method Not Allowed'
else:
j=JSONRPCResponseManager.handle(environ['wsgi.input'].read().decode(), d)
if j:
start_response('200 OK',[('Content-Type','application/json'), ('Access-Control-Allow-Origin','*')])
yield j.json.encode()
else:
start_response('204 No Content',[('Access-Control-Allow-Origin','*')])
yield b'' | Add allowed headers to preflight response. | Add allowed headers to preflight response. | Python | mit | 1stop-st/jsonrpc-calculator | ---
+++
@@ -4,7 +4,10 @@
def app(environ, start_response):
if 'POST'!=environ.get('REQUEST_METHOD'):
if 'OPTIONS'==environ.get('REQUEST_METHOD'):
- start_response('200 OK',[('Access-Control-Allow-Origin','*'), ('Access-Control-Allow-Methods', 'POST')])
+ start_response('200 OK',[
+ ('Access-Control-Allow-Origin','*'),
+ ('Access-Control-Allow-Methods', 'POST'),
+ ('Access-Control-Allow-Headers', 'Content-Type')])
yield b''
else:
start_response('405 Method Not Allowed',[('Content-Type','text/plain')]) |
793332d42c6568a79b321a1474f7aca6834e082e | main.py | main.py | #!/usr/bin/env python3
from flask import Flask
from flask import request
from flask import jsonify
from utils import download_file, create_temp_dir, zip_up_dir, delete_dir
from annotations import sort_annotations_by_time
from videoeditor import bake_annotations
app = Flask(__name__)
@app.route("/", methods=["POST"])
def index():
request_json = request.get_json()
# Allow both plain JSON objects and arrays
if type(request_json) is dict:
request_json = [request_json]
for video_json in request_json:
export_dir_name = create_temp_dir()
video_uri = video_json["videoUri"]
video_filename = video_uri.rsplit("/")[-1]
video_location = download_file(video_uri, "video-cache/" + video_filename)
sorted_annotations = sort_annotations_by_time(video_json["annotations"])
bake_annotations(video_location, export_dir_name + '/' + video_filename, sorted_annotations)
zip_up_dir(export_dir_name, 'video-exports/' + video_json["title"])
delete_dir(export_dir_name)
return jsonify({"message": "Annotated video created succesfully"})
if __name__ == "__main__":
app.run(debug=True)
| #!/usr/bin/env python3
from flask import Flask
from flask import request
from flask import jsonify
from utils import download_file, create_temp_dir, zip_up_dir, delete_dir
from annotations import sort_annotations_by_time
from videoeditor import bake_annotations
app = Flask(__name__)
@app.route("/", methods=["POST"])
def index():
request_json = request.get_json()
# Allow both plain JSON objects and arrays
if type(request_json) is dict:
request_json = [request_json]
for video_json in request_json:
export_dir_name = create_temp_dir()
video_uri = video_json["videoUri"]
video_filename = video_uri.rsplit("/")[-1]
video_location = download_file(video_uri, "video-cache/" + video_filename)
sorted_annotations = sort_annotations_by_time(video_json["annotations"])
bake_annotations(video_location, export_dir_name + '/' + video_filename, sorted_annotations)
zip_up_dir(export_dir_name, 'video-exports/' + video_json["title"])
delete_dir(export_dir_name)
return jsonify({"message": "Annotated video created successfully"})
if __name__ == "__main__":
app.run(debug=True)
| Fix typo in output json message | Fix typo in output json message
| Python | mit | melonmanchan/achso-video-exporter,melonmanchan/achso-video-exporter | ---
+++
@@ -31,8 +31,7 @@
zip_up_dir(export_dir_name, 'video-exports/' + video_json["title"])
delete_dir(export_dir_name)
-
- return jsonify({"message": "Annotated video created succesfully"})
+ return jsonify({"message": "Annotated video created successfully"})
if __name__ == "__main__":
app.run(debug=True) |
dcdfd994f1ab79a5fd8e50e7bf478100211a77aa | oscar_vat_moss/checkout/session.py | oscar_vat_moss/checkout/session.py | from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from oscar.apps.checkout import session, exceptions
from oscar_vat_moss import vat
class CheckoutSessionMixin(session.CheckoutSessionMixin):
def build_submission(self, **kwargs):
submission = super(CheckoutSessionMixin, self).build_submission(
**kwargs)
assess_tax = (submission['shipping_method']
and submission['shipping_address']
and submission['shipping_address'].phone_number)
if assess_tax:
try:
vat.apply_to(submission)
except vat.VATAssessmentException as e:
raise exceptions.FailedPreCondition(
url=reverse('checkout:shipping-address'),
message=_(str(e))
)
# Recalculate order total to ensure we have a tax-inclusive total
submission['order_total'] = self.get_order_totals(
submission['basket'], submission['shipping_charge'])
return submission
def get_context_data(self, **kwargs):
ctx = super(CheckoutSessionMixin, self).get_context_data(**kwargs)
# Oscar's checkout templates look for this variable which specifies to
# break out the tax totals into a separate subtotal.
ctx['show_tax_separately'] = True
return ctx
| from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from oscar.apps.checkout import session, exceptions
from oscar_vat_moss import vat
class CheckoutSessionMixin(session.CheckoutSessionMixin):
def build_submission(self, **kwargs):
submission = super(CheckoutSessionMixin, self).build_submission(
**kwargs)
assess_tax = (submission['shipping_method']
and submission['shipping_address']
and submission['shipping_address'].phone_number)
if assess_tax:
try:
vat.apply_to(submission)
except vat.VATAssessmentException as e:
raise exceptions.FailedPreCondition(
url=reverse('checkout:shipping-address'),
message=_(str(e))
)
# Recalculate order total to ensure we have a tax-inclusive total
submission['order_total'] = self.get_order_totals(
submission['basket'], submission['shipping_charge'])
return submission
def check_a_valid_shipping_address_is_captured(self):
super(CheckoutSessionMixin, self)
shipping_address = self.get_shipping_address(
basket=self.request.basket)
try:
vat.lookup_vat_for_shipping_address(shipping_address)
except vat.VATAssessmentException as e:
message = _("%s. Please try again." % str(e))
raise exceptions.FailedPreCondition(
url=reverse('checkout:shipping-address'),
message=message
)
def get_context_data(self, **kwargs):
ctx = super(CheckoutSessionMixin, self).get_context_data(**kwargs)
# Oscar's checkout templates look for this variable which specifies to
# break out the tax totals into a separate subtotal.
ctx['show_tax_separately'] = True
return ctx
| Add a "valid shipping address check" to account for VAT discrepancies | Add a "valid shipping address check" to account for VAT discrepancies
If we get a phone number and a city/country combination that yield
incompatible VAT results, we need to flag this to the user. The best
place to do this is, ironically, the shipping address check.
| Python | bsd-3-clause | hastexo/django-oscar-vat_moss,fghaas/django-oscar-vat_moss,arbrandes/django-oscar-vat_moss,fghaas/django-oscar-vat_moss,hastexo/django-oscar-vat_moss,arbrandes/django-oscar-vat_moss | ---
+++
@@ -27,6 +27,19 @@
submission['basket'], submission['shipping_charge'])
return submission
+ def check_a_valid_shipping_address_is_captured(self):
+ super(CheckoutSessionMixin, self)
+ shipping_address = self.get_shipping_address(
+ basket=self.request.basket)
+ try:
+ vat.lookup_vat_for_shipping_address(shipping_address)
+ except vat.VATAssessmentException as e:
+ message = _("%s. Please try again." % str(e))
+ raise exceptions.FailedPreCondition(
+ url=reverse('checkout:shipping-address'),
+ message=message
+ )
+
def get_context_data(self, **kwargs):
ctx = super(CheckoutSessionMixin, self).get_context_data(**kwargs)
|
6fafe0e2d10229ac68fd5bc7857b938d7cd5b212 | wafer/talks/models.py | wafer/talks/models.py | from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
# constants to make things clearer elsewhere
ACCEPTED = 'A'
PENDING = 'P'
REJECTED = 'R'
class Talk(models.Model):
TALK_STATUS = (
(ACCEPTED, 'Accepted'),
(REJECTED, 'Not Accepted'),
(PENDING, 'Under Consideration'),
)
talk_id = models.AutoField(primary_key=True)
title = models.CharField(max_length=1024)
abstract = models.TextField(
help_text=_("Write two or three paragraphs describing your talk"))
status = models.CharField(max_length=1, choices=TALK_STATUS,
default=PENDING)
corresponding_author = models.ForeignKey(
User, related_name='contact_talks')
authors = models.ManyToManyField(User, related_name='talks')
def __unicode__(self):
return u'%s: %s' % (self.corresponding_author, self.title)
def get_absolute_url(self):
return reverse('wafer_talk', args=(self.talk_id,))
# Helpful properties for the templates
accepted = property(fget=lambda x: x.status == ACCEPTED)
pending = property(fget=lambda x: x.status == PENDING)
reject = property(fget=lambda x: x.status == REJECTED)
| from django.utils.translation import ugettext_lazy as _
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
from wafer.snippets.markdown_field import MarkdownTextField
# constants to make things clearer elsewhere
ACCEPTED = 'A'
PENDING = 'P'
REJECTED = 'R'
class Talk(models.Model):
TALK_STATUS = (
(ACCEPTED, 'Accepted'),
(REJECTED, 'Not Accepted'),
(PENDING, 'Under Consideration'),
)
talk_id = models.AutoField(primary_key=True)
title = models.CharField(max_length=1024)
abstract = MarkdownTextField(
help_text=_("Write two or three paragraphs describing your talk"))
status = models.CharField(max_length=1, choices=TALK_STATUS,
default=PENDING)
corresponding_author = models.ForeignKey(
User, related_name='contact_talks')
authors = models.ManyToManyField(User, related_name='talks')
def __unicode__(self):
return u'%s: %s' % (self.corresponding_author, self.title)
def get_absolute_url(self):
return reverse('wafer_talk', args=(self.talk_id,))
# Helpful properties for the templates
accepted = property(fget=lambda x: x.status == ACCEPTED)
pending = property(fget=lambda x: x.status == PENDING)
reject = property(fget=lambda x: x.status == REJECTED)
| Make the abstract a Markdown field | Make the abstract a Markdown field
| Python | isc | CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer | ---
+++
@@ -2,6 +2,8 @@
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
+
+from wafer.snippets.markdown_field import MarkdownTextField
# constants to make things clearer elsewhere
@@ -22,7 +24,7 @@
title = models.CharField(max_length=1024)
- abstract = models.TextField(
+ abstract = MarkdownTextField(
help_text=_("Write two or three paragraphs describing your talk"))
status = models.CharField(max_length=1, choices=TALK_STATUS, |
42d667c0478b5b500d765e3d70cc03ec7e7d84d3 | main.py | main.py | #!/usr/bin/env python3
from time import sleep
from utils.mqtt import publish
from weather import Weather
w = Weather()
while True:
publish(w.basics(),"stormfly")
sleep(600)
| #!/usr/bin/env python3
from time import sleep
from utils.mqtt import publish
from weather import Weather
w = Weather()
while True:
publish(w.basics(),"stormfly")
sleep(600)
w.refresh()
| FIX refresh data when running in loop | FIX refresh data when running in loop
| Python | mit | paulfantom/AGH-weather-mqtt | ---
+++
@@ -9,3 +9,4 @@
while True:
publish(w.basics(),"stormfly")
sleep(600)
+ w.refresh() |
d6e5b3835c7779a3fde7be7004b3d4975c4b9f1a | xbob/core/__init__.py | xbob/core/__init__.py | from ._convert import convert
from . import log
from . import random
from . import version
from .version import module as __version__
from .version import api as __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
def get_config():
"""Returns a string containing the configuration information.
"""
import pkg_resources
from .version import externals
packages = pkg_resources.require(__name__)
this = packages[0]
deps = packages[1:]
retval = "%s: %s (%s)\n" % (this.key, this.version, this.location)
retval += " - c/c++ dependencies:\n"
for k in sorted(externals): retval += " - %s: %s\n" % (k, externals[k])
retval += " - python dependencies:\n"
for d in deps: retval += " - %s: %s (%s)\n" % (d.key, d.version, d.location)
return retval.strip()
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
| from ._convert import convert
from . import log
from . import random
from . import version
from .version import module as __version__
from .version import api as __api_version__
def get_include():
"""Returns the directory containing the C/C++ API include directives"""
return __import__('pkg_resources').resource_filename(__name__, 'include')
def get_config():
"""Returns a string containing the configuration information.
"""
import pkg_resources
from .version import externals
packages = pkg_resources.require(__name__)
this = packages[0]
deps = packages[1:]
retval = "%s: %s [api=0x%04x] (%s)\n" % (this.key, this.version,
version.api, this.location)
retval += " - c/c++ dependencies:\n"
for k in sorted(externals): retval += " - %s: %s\n" % (k, externals[k])
retval += " - python dependencies:\n"
for d in deps: retval += " - %s: %s (%s)\n" % (d.key, d.version, d.location)
return retval.strip()
# gets sphinx autodoc done right - don't remove it
__all__ = [_ for _ in dir() if not _.startswith('_')]
| Add API number in get_config() | Add API number in get_config()
| Python | bsd-3-clause | tiagofrepereira2012/bob.core,tiagofrepereira2012/bob.core,tiagofrepereira2012/bob.core | ---
+++
@@ -21,7 +21,8 @@
this = packages[0]
deps = packages[1:]
- retval = "%s: %s (%s)\n" % (this.key, this.version, this.location)
+ retval = "%s: %s [api=0x%04x] (%s)\n" % (this.key, this.version,
+ version.api, this.location)
retval += " - c/c++ dependencies:\n"
for k in sorted(externals): retval += " - %s: %s\n" % (k, externals[k])
retval += " - python dependencies:\n" |
5d40ec75cd27e3bf9c5fad2a921db84f0b5d672d | linked_accounts/utils.py | linked_accounts/utils.py | from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from linked_accounts import LINKED_ACCOUNTS_HANDLERS
HANDLERS = getattr(
settings,
'LINKED_ACCOUNTS_HANDLERS',
LINKED_ACCOUNTS_HANDLERS
)
def get_profile(service=None, token=None):
handler_module = dict(HANDLERS).get(service, None)
if handler_module:
module, handler = handler_module.rsplit('.', 1)
handler_class = getattr(import_module(module), handler)
handler = handler_class()
profile = handler.get_profile(token)
return profile.user
else:
raise ImproperlyConfigured('No handler for service %s' % service)
| from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.utils.importlib import import_module
from linked_accounts import LINKED_ACCOUNTS_HANDLERS
HANDLERS = getattr(
settings,
'LINKED_ACCOUNTS_HANDLERS',
LINKED_ACCOUNTS_HANDLERS
)
def get_profile(service=None, token=None):
handler_module = dict(HANDLERS).get(service, None)
if handler_module:
module, handler = handler_module.rsplit('.', 1)
handler_class = getattr(import_module(module), handler)
handler = handler_class()
profile = handler.get_profile(token)
return profile
else:
raise ImproperlyConfigured('No handler for service %s' % service)
| Return profile from get_profile view instead of user | Return profile from get_profile view instead of user
| Python | mit | zen4ever/django-linked-accounts,zen4ever/django-linked-accounts | ---
+++
@@ -19,6 +19,6 @@
handler_class = getattr(import_module(module), handler)
handler = handler_class()
profile = handler.get_profile(token)
- return profile.user
+ return profile
else:
raise ImproperlyConfigured('No handler for service %s' % service) |
693f4f52bfed6d25fc32504fcfc8a57e466533a0 | list/list.py | list/list.py | #!/usr/local/bin/python
#a=[1,2,3,4,5,6,7,8,9,10]
#print a[:3]
#print a[-3:]
#print a[:]
#print a[::2]
#print a[8:3:-1]
#print [1,2,3]+[4,5,6]
#print ["Hi"]*3
#print 1 in a
#print max(a)
#print min(a)
#print len(a)
#print list("Hello")
#b=[1,3,5,7,9,8]
#b[1]=4
#print b
#del b[1]
#print b
c=list("Perl")
c[1:1]=list('ython')
c[-4:]=[]
c.append('n')
print c.count('t')
print c
d=[1,2,3]
d.extend([4,5,6])
print d
e=[7,8,9]
print e+[10,11,12]
print e
| #!/usr/local/bin/python
#a=[1,2,3,4,5,6,7,8,9,10]
#print a[:3]
#print a[-3:]
#print a[:]
#print a[::2]
#print a[8:3:-1]
#print [1,2,3]+[4,5,6]
#print ["Hi"]*3
#print 1 in a
#print max(a)
#print min(a)
#print len(a)
#print list("Hello")
#b=[1,3,5,7,9,8]
#b[1]=4
#print b
#del b[1]
#print b
#c=list("Perl")
#c[1:1]=list('ython')
#c[-4:]=[]
#c.append('n')
#print c.count('t')
#print c
#d=[1,2,3]
#d.extend([4,5,6])
#print d
#e=[7,8,9]
#print e+[10,11,12]
#print e
f=list("abcdefg")
print f.index('c')
f.insert(len(f),'h')
print f
print f.pop()
f.append('h')
print f
f.remove('h')
print f
g=[1,2,3]
g.reverse()
print g
h=[3,4,8,2,6]
i=h[:]
i.sort()
print i
h.sort(reverse=True)
print h
j=['ads','dd','eeeee','asdsadd']
print j.sort(key=len)
print j
| Use sort,pop,remove and so on. | Use sort,pop,remove and so on.
| Python | apache-2.0 | Vayne-Lover/Python | ---
+++
@@ -17,15 +17,36 @@
#print b
#del b[1]
#print b
-c=list("Perl")
-c[1:1]=list('ython')
-c[-4:]=[]
-c.append('n')
-print c.count('t')
-print c
-d=[1,2,3]
-d.extend([4,5,6])
-print d
-e=[7,8,9]
-print e+[10,11,12]
-print e
+#c=list("Perl")
+#c[1:1]=list('ython')
+#c[-4:]=[]
+#c.append('n')
+#print c.count('t')
+#print c
+#d=[1,2,3]
+#d.extend([4,5,6])
+#print d
+#e=[7,8,9]
+#print e+[10,11,12]
+#print e
+f=list("abcdefg")
+print f.index('c')
+f.insert(len(f),'h')
+print f
+print f.pop()
+f.append('h')
+print f
+f.remove('h')
+print f
+g=[1,2,3]
+g.reverse()
+print g
+h=[3,4,8,2,6]
+i=h[:]
+i.sort()
+print i
+h.sort(reverse=True)
+print h
+j=['ads','dd','eeeee','asdsadd']
+print j.sort(key=len)
+print j |
2f6e13a868f18a516f0eb79efa70f3ae527c4aad | tinman/handlers/__init__.py | tinman/handlers/__init__.py | """Custom Tinman Handlers add wrappers to based functionality to speed
application development.
"""
from tinman.handlers.session import SessionRequestHandler
| """Custom Tinman Handlers add wrappers to based functionality to speed
application development.
"""
from tinman.handlers.base import RequestHandler
from tinman.handlers.session import SessionRequestHandler
| Make the base request handler available by default | Make the base request handler available by default
| Python | bsd-3-clause | lucius-feng/tinman,gmr/tinman,lucius-feng/tinman,gmr/tinman,lucius-feng/tinman | ---
+++
@@ -2,4 +2,5 @@
application development.
"""
+from tinman.handlers.base import RequestHandler
from tinman.handlers.session import SessionRequestHandler |
d937e254ce3c806300ac7763e30bd4303661cba6 | whaler/analysis.py | whaler/analysis.py | """
"""
import os
from whaler.dataprep import IO
class Analysis():
"""
"""
def __init__(self):
self.loc = os.getcwd()
self.structs = next(os.walk('.'))[1]
print(self.loc)
print(self.structs)
def groundstates_all(self):
"""Compares the energies of each calculated spin state for a structure
and writes the energy differences as a table."""
results = [self.spinstates(struct) for struct in self.structs]
# write table as groundstates.out file.
def spinstates(self, structure):
"""For a given structure, identifies all of the files optimizing
geometries in different spin states. Verifies convergence, and then
finds the final single-point energy for each file. Returns an array of
energies of the various spin states.
Possibilities: S T P D Q (for S = 0, 1, 2, 1/2, 3/2)
"""
| """
"""
import os
import numpy as np
from whaler.dataprep import IO
class Analysis():
"""
"""
def __init__(self):
self.loc = os.getcwd()
self.structs = next(os.walk('.'))[1]
print(self.loc)
print(self.structs)
def groundstates_all(self, outname="groundstates.csv"):
"""Compares the energies of each calculated spin state for a structure
and writes the energy differences as a table."""
results = [self.spinstates(struct) for struct in self.structs]
columns = [] #turn list of rows into list of columns
# write table as groundstates.out file.
writer = IO(outname, self.loc)
headers = np.array(['Structures', 'S', 'T', 'P', 'D', 'Q'])
writer.tabulate_data(columns, headers, 'Structures')
def spinstates(self, structure):
"""For a given structure, identifies all of the files optimizing
geometries in different spin states. Verifies convergence, and then
finds the final single-point energy for each file. Returns an array of
energies of the various spin states.
Possibilities: S T P D Q (for S = 0, 1, 2, 1/2, 3/2)
"""
| Set up data tabulation for gs | Set up data tabulation for gs
| Python | mit | tristanbrown/whaler | ---
+++
@@ -3,6 +3,7 @@
"""
import os
+import numpy as np
from whaler.dataprep import IO
class Analysis():
@@ -14,13 +15,20 @@
print(self.loc)
print(self.structs)
- def groundstates_all(self):
+ def groundstates_all(self, outname="groundstates.csv"):
"""Compares the energies of each calculated spin state for a structure
and writes the energy differences as a table."""
results = [self.spinstates(struct) for struct in self.structs]
+ columns = [] #turn list of rows into list of columns
# write table as groundstates.out file.
+ writer = IO(outname, self.loc)
+
+
+ headers = np.array(['Structures', 'S', 'T', 'P', 'D', 'Q'])
+
+ writer.tabulate_data(columns, headers, 'Structures')
def spinstates(self, structure):
"""For a given structure, identifies all of the files optimizing |
7d81a2f27c0bf9ab57d046152981c3882016e013 | wordcloud/views.py | wordcloud/views.py | import os
from django.conf import settings
from django.http import HttpResponse
from django.utils import simplejson
from django.views.decorators.cache import cache_page
from .wordcloud import popular_words
@cache_page(60*60*4)
def wordcloud(request, max_entries=30):
""" Return tag cloud JSON results"""
cache_path = settings.WORDCLOUD_CACHE_PATH
if os.path.exists(cache_path):
with open(cache_path) as cached_file:
content = cached_file.read()
else:
content = simplejson.dumps(popular_words(max_entries=max_entries))
return HttpResponse(
content,
mimetype='application/json',
)
| import os
from django.conf import settings
from django.http import HttpResponse
from django.utils import simplejson
from django.views.decorators.cache import cache_page
from .wordcloud import popular_words
@cache_page(60*60*4)
def wordcloud(request, max_entries=30):
""" Return tag cloud JSON results"""
cache_path = settings.WORDCLOUD_CACHE_PATH
if os.path.exists(cache_path):
response = HttpResponse()
response['Content-Type'] = 'application/json'
response['X-Sendfile'] = cache_path.encode('utf-8')
return response
content = simplejson.dumps(popular_words(max_entries=max_entries))
return HttpResponse(
content,
mimetype='application/json',
)
| Use x-sendfile to serve pre-generated wordcloud JSON. | Use x-sendfile to serve pre-generated wordcloud JSON.
If we've pre-generated the wordcloud JSON file, use x-sendfile to
serve it rather than reading the file in Django.
| Python | agpl-3.0 | geoffkilpin/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,geoffkilpin/pombola,mysociety/pombola,mysociety/pombola | ---
+++
@@ -13,10 +13,12 @@
""" Return tag cloud JSON results"""
cache_path = settings.WORDCLOUD_CACHE_PATH
if os.path.exists(cache_path):
- with open(cache_path) as cached_file:
- content = cached_file.read()
- else:
- content = simplejson.dumps(popular_words(max_entries=max_entries))
+ response = HttpResponse()
+ response['Content-Type'] = 'application/json'
+ response['X-Sendfile'] = cache_path.encode('utf-8')
+ return response
+
+ content = simplejson.dumps(popular_words(max_entries=max_entries))
return HttpResponse(
content, |
cca7e512061948abe05fd25111974f41fa6fb6ec | romanesco/plugins/swift/tests/swift_test.py | romanesco/plugins/swift/tests/swift_test.py | import romanesco
import unittest
class TestSwiftMode(unittest.TestCase):
def testSwiftMode(self):
task = {
'mode': 'swift',
'script': """
type file;
app (file out) echo_app (string s)
{
echo s stdout=filename(out);
}
string a = arg("a", "10");
file out <"out.csv">;
out = echo_app(strcat("a,b,c\\n", a, ",2,3"));
""",
'inputs': [{
'id': 'a',
'format': 'json',
'type': 'number'
}],
'swift_args': ['-a=$input{a}'],
'outputs': [{
'id': 'out.csv',
'type': 'table',
'format': 'csv'
}]
}
inputs = {
'a': {
'format': 'number',
'data': 5
}
}
# Use user-specified filename
out = romanesco.run(task, inputs=inputs)
# We bound _stderr as a task output, so it should be in the output
self.assertEqual(out, {
'out.csv': {
'data': 'a,b,c\n5,2,3\n',
'format': 'csv'
}
})
| import romanesco
import unittest
class TestSwiftMode(unittest.TestCase):
def testSwiftMode(self):
task = {
'mode': 'swift',
'script': """
type file;
app (file out) echo_app (string s)
{
echo s stdout=filename(out);
}
string a = arg("a", "10");
file out <"out.csv">;
out = echo_app(strcat("a,b,c\\n", a, ",2,3"));
""",
'inputs': [{
'id': 'a',
'format': 'json',
'type': 'number'
}],
'swift_args': ['-a=$input{a}'],
'outputs': [{
'id': 'out.csv',
'type': 'table',
'format': 'csv'
}]
}
inputs = {
'a': {
'format': 'number',
'data': 5
}
}
out = romanesco.run(task, inputs=inputs)
self.assertEqual(out, {
'out.csv': {
'data': 'a,b,c\n5,2,3\n',
'format': 'csv'
}
})
| Remove comments that don't make sense | Remove comments that don't make sense
| Python | apache-2.0 | girder/girder_worker,girder/girder_worker,Kitware/romanesco,Kitware/romanesco,Kitware/romanesco,Kitware/romanesco,girder/girder_worker | ---
+++
@@ -39,10 +39,8 @@
}
}
- # Use user-specified filename
out = romanesco.run(task, inputs=inputs)
- # We bound _stderr as a task output, so it should be in the output
self.assertEqual(out, {
'out.csv': {
'data': 'a,b,c\n5,2,3\n', |
9bf6aec99ac490fce1af2ea92bea57b7d1e9acd9 | heat/common/environment_format.py | heat/common/environment_format.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common.template_format import yaml
SECTIONS = (PARAMETERS, RESOURCE_REGISTRY) = \
('parameters', 'resource_registry')
def parse(env_str):
'''
Takes a string and returns a dict containing the parsed structure.
This includes determination of whether the string is using the
JSON or YAML format.
'''
try:
env = yaml.safe_load(env_str)
except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e:
raise ValueError(e)
else:
if env is None:
env = {}
for param in env:
if param not in SECTIONS:
raise ValueError(_('environment has wrong section "%s"') % param)
return env
def default_for_missing(env):
'''
Checks a parsed environment for missing sections.
'''
for param in SECTIONS:
if param not in env:
env[param] = {}
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common.template_format import yaml
from heat.common.template_format import yaml_loader
SECTIONS = (PARAMETERS, RESOURCE_REGISTRY) = \
('parameters', 'resource_registry')
def parse(env_str):
'''
Takes a string and returns a dict containing the parsed structure.
This includes determination of whether the string is using the
JSON or YAML format.
'''
try:
env = yaml.load(env_str, Loader=yaml_loader)
except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e:
raise ValueError(e)
else:
if env is None:
env = {}
for param in env:
if param not in SECTIONS:
raise ValueError(_('environment has wrong section "%s"') % param)
return env
def default_for_missing(env):
'''
Checks a parsed environment for missing sections.
'''
for param in SECTIONS:
if param not in env:
env[param] = {}
| Make the template and env yaml parsing more consistent | Make the template and env yaml parsing more consistent
in the environment_format.py use the same yaml_loader
Partial-bug: #1242155
Change-Id: I66b08415d450bd4758af648eaff0f20dd934a9cc
| Python | apache-2.0 | noironetworks/heat,ntt-sic/heat,openstack/heat,srznew/heat,dragorosson/heat,steveb/heat,gonzolino/heat,pratikmallya/heat,srznew/heat,redhat-openstack/heat,cryptickp/heat,pratikmallya/heat,cwolferh/heat-scratch,pshchelo/heat,NeCTAR-RC/heat,miguelgrinberg/heat,pshchelo/heat,dragorosson/heat,maestro-hybrid-cloud/heat,takeshineshiro/heat,jasondunsmore/heat,ntt-sic/heat,maestro-hybrid-cloud/heat,miguelgrinberg/heat,rh-s/heat,rdo-management/heat,jasondunsmore/heat,cryptickp/heat,dims/heat,steveb/heat,NeCTAR-RC/heat,noironetworks/heat,gonzolino/heat,rdo-management/heat,redhat-openstack/heat,rh-s/heat,cwolferh/heat-scratch,dims/heat,takeshineshiro/heat,openstack/heat | ---
+++
@@ -14,6 +14,7 @@
# under the License.
from heat.common.template_format import yaml
+from heat.common.template_format import yaml_loader
SECTIONS = (PARAMETERS, RESOURCE_REGISTRY) = \
@@ -27,7 +28,7 @@
JSON or YAML format.
'''
try:
- env = yaml.safe_load(env_str)
+ env = yaml.load(env_str, Loader=yaml_loader)
except (yaml.scanner.ScannerError, yaml.parser.ParserError) as e:
raise ValueError(e)
else: |
f280bc260b8a48e7b9e5d5d6a4995ca721440712 | brume/template.py | brume/template.py | import boto3
import sys
from colors import green, red
from botocore.exceptions import ClientError
s3_client = boto3.client('s3')
class InvalidTemplateError(BaseException):
def __init__(self, m):
self.m = m
def __str__(self):
return self.m
class Template():
def __init__(self, file):
self.file = file
try:
self.content = open(file, 'r').read()
except IOError as e:
print(red('File {!r} does not exist'.format(file)))
raise e
self.public_url = ''
self.key = ''
def validate(self):
sys.stdout.write('Validating {} ... '.format(self.file))
cfn_client = boto3.client('cloudformation')
try:
cfn_client.validate_template(TemplateBody=self.content)
except ClientError as e:
print(red('invalid'))
print(e)
exit(1)
else:
print(green('valid'))
return self
def upload(self, bucket, path):
self.key = path.strip('/') + '/' + self.file
self.public_url = 'https://{}.s3.amazonaws.com/{}'.format(bucket, self.key)
print("Publishing {} to {}".format(self.file, self.public_url))
s3_client.put_object(
Bucket=bucket,
Body=self.content,
ACL='public-read',
Key=path + '/' + self.file
)
return self
| import boto3
import sys
from colors import green, red
from botocore.exceptions import ClientError
s3_client = boto3.client('s3')
class InvalidTemplateError(BaseException):
def __init__(self, m):
self.m = m
def __str__(self):
return self.m
class CfnTemplate():
def __init__(self, file):
self.file = file
try:
self.content = open(file, 'r').read()
except IOError as e:
print(red('File {!r} does not exist'.format(file)))
raise e
self.public_url = ''
self.key = ''
def validate(self):
sys.stdout.write('Validating {} ... '.format(self.file))
cfn_client = boto3.client('cloudformation')
try:
cfn_client.validate_template(TemplateBody=self.content)
except ClientError as e:
print(red('invalid'))
print(e)
exit(1)
else:
print(green('valid'))
return self
def upload(self, bucket, path):
self.key = path.strip('/') + '/' + self.file
self.public_url = 'https://{}.s3.amazonaws.com/{}'.format(bucket, self.key)
print("Publishing {} to {}".format(self.file, self.public_url))
s3_client.put_object(
Bucket=bucket,
Body=self.content,
ACL='public-read',
Key=path + '/' + self.file
)
return self
| Rename Template to CfnTemplate to avoid name collision | Rename Template to CfnTemplate to avoid name collision
| Python | mit | flou/brume,geronimo-iia/brume | ---
+++
@@ -14,7 +14,7 @@
return self.m
-class Template():
+class CfnTemplate():
def __init__(self, file):
self.file = file
try: |
6500d388fa894bb0ea8cb0ca1328a73cc54ba4e8 | Challenges/chall_02.py | Challenges/chall_02.py | #!/usr/local/bin/python3
# Python Challenge - 2
# http://www.pythonchallenge.com/pc/def/ocr.html
# Keyword: equality
def main():
alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
letters = []
with open('garbage.txt', 'r') as garbage:
for line in garbage.readlines():
for c in line:
if c in alphabet:
letters.append(c)
print(''.join(letters))
return 0
if __name__ == '__main__':
main()
| #!/usr/local/bin/python3
# Python Challenge - 2
# http://www.pythonchallenge.com/pc/def/ocr.html
# Keyword: equality
import string
def main():
'''
Hint: recognize the characters. maybe they are in the book,
but MAYBE they are in the page source.
Page source text saved in garbage.txt
'''
alphabet = string.ascii_letters
with open('garbage.txt', 'r') as garbage:
letters = [c for line in garbage.readlines() for c in line
if c in alphabet]
# Long form of nested loops:
# letters = []
# for line in garbage.readlines():
# for c in line:
# if c in alphabet:
# letters.append(c)
print(''.join(letters))
return 0
if __name__ == '__main__':
main()
| Refactor code, add page hints | Refactor code, add page hints
| Python | mit | HKuz/PythonChallenge | ---
+++
@@ -3,15 +3,26 @@
# http://www.pythonchallenge.com/pc/def/ocr.html
# Keyword: equality
+import string
+
def main():
- alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz'
- letters = []
+ '''
+ Hint: recognize the characters. maybe they are in the book,
+ but MAYBE they are in the page source.
+ Page source text saved in garbage.txt
+ '''
+
+ alphabet = string.ascii_letters
with open('garbage.txt', 'r') as garbage:
- for line in garbage.readlines():
- for c in line:
- if c in alphabet:
- letters.append(c)
+ letters = [c for line in garbage.readlines() for c in line
+ if c in alphabet]
+ # Long form of nested loops:
+ # letters = []
+ # for line in garbage.readlines():
+ # for c in line:
+ # if c in alphabet:
+ # letters.append(c)
print(''.join(letters))
return 0 |
5631276591cf2c4e3c83920da32857e47286d9c9 | wanikani/django.py | wanikani/django.py | from __future__ import absolute_import
import os
import logging
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.wanikani')
with open(CONFIG_PATH) as fp:
API_KEY = fp.read()
logger = logging.getLogger(__name__)
class WaniKaniView(View):
def get(self, request, *args, **kwargs):
client = WaniKani(API_KEY)
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
cal.add('prodid', '-//My calendar product//mxm.dk//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
event.add('summary', 'R: {0} K: {1}'.format(
counts[Radical], counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
|
from __future__ import absolute_import
from django.http import HttpResponse
from django.views.generic.base import View
from icalendar import Calendar, Event
from wanikani.core import WaniKani, Radical, Kanji
class WaniKaniView(View):
def get(self, request, **kwargs):
client = WaniKani(kwargs['api_key'])
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//')
cal.add('version', '2.0')
for ts in sorted(queue):
if not len(queue[ts]):
continue
counts = {
Radical: 0,
Kanji: 0,
}
for obj in queue[ts]:
counts[obj.__class__] += 1
event = Event()
event.add('summary', 'R: {0} K: {1}'.format(
counts[Radical], counts[Kanji]
))
event.add('dtstart', ts)
event.add('dtend', ts)
event['uid'] = str(ts)
cal.add_component(event)
return HttpResponse(
content=cal.to_ical(),
content_type='text/plain; charset=utf-8'
)
| Switch to getting the API key from the URL instead of a config file. | Switch to getting the API key from the URL instead of a config file.
Allows other people to get their anki calendar if they want. | Python | mit | kfdm/wanikani,kfdm/wanikani | ---
+++
@@ -1,7 +1,5 @@
+
from __future__ import absolute_import
-
-import os
-import logging
from django.http import HttpResponse
from django.views.generic.base import View
@@ -11,23 +9,16 @@
from wanikani.core import WaniKani, Radical, Kanji
-CONFIG_PATH = os.path.join(os.path.expanduser('~'), '.wanikani')
-
-with open(CONFIG_PATH) as fp:
- API_KEY = fp.read()
-
-logger = logging.getLogger(__name__)
-
class WaniKaniView(View):
- def get(self, request, *args, **kwargs):
- client = WaniKani(API_KEY)
+ def get(self, request, **kwargs):
+ client = WaniKani(kwargs['api_key'])
level = client.profile()['level']
queue = client.query(level, items=[Radical, Kanji], include=[u'apprentice'])
cal = Calendar()
- cal.add('prodid', '-//My calendar product//mxm.dk//')
+ cal.add('prodid', '-//Wanikani Blockers//github.com/kfdm/wanikani//')
cal.add('version', '2.0')
for ts in sorted(queue): |
1d9b7d855d633da6388daf663398449cfc0e6ab6 | StandaloneViewer/etc/redirectingSimpleServer.py | StandaloneViewer/etc/redirectingSimpleServer.py | import SimpleHTTPServer, SocketServer
import urlparse, os
PORT = 3000
class MyHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
# Parse query data to find out what was requested
parsedParams = urlparse.urlparse(self.path)
# See if the file requested exists
if os.access('.' + os.sep + parsedParams.path, os.R_OK):
# File exists, serve it up
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self);
else:
# send index.html
self.send_response(200)
self.send_header('Content-Type', 'text/html')
self.end_headers()
with open('index.html', 'r') as fin:
self.copyfile(fin, self.wfile)
Handler = MyHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
| import SimpleHTTPServer, SocketServer
import urlparse, os
PORT = 3000
## Note: If you set this parameter, you can try to serve files
# at a subdirectory. You should use
# -u http://localhost:3000/subdirectory
# when building the application, which will set this as your
# ROOT_URL.
#URL_PATH="/subdirectory"
URL_PATH=""
class MyHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
# Strip the subdirectory from the PATH
# e.g. localhost:3000/subdirectory/packages/ohif_polyfill/svg4everybody.min.js
# is interpreted by this script as localhost:3000/packages/ohif_polyfill/svg4everybody.min.js
# so the file is found properly.
self.path = self.path.replace(URL_PATH, "")
# Parse query data to find out what was requested
parsedParams = urlparse.urlparse(self.path)
# See if the file requested exists
if os.access('.' + os.sep + parsedParams.path, os.R_OK):
# File exists, serve it up
SimpleHTTPServer.SimpleHTTPRequestHandler.do_GET(self);
else:
# send index.html
self.send_response(200)
self.send_header('Content-Type', 'text/html')
self.end_headers()
with open('index.html', 'r') as fin:
self.copyfile(fin, self.wfile)
Handler = MyHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
| Update Python simple server script to allow subdomains | Update Python simple server script to allow subdomains
| Python | mit | OHIF/Viewers,OHIF/Viewers,OHIF/Viewers | ---
+++
@@ -3,8 +3,22 @@
PORT = 3000
+## Note: If you set this parameter, you can try to serve files
+# at a subdirectory. You should use
+# -u http://localhost:3000/subdirectory
+# when building the application, which will set this as your
+# ROOT_URL.
+#URL_PATH="/subdirectory"
+URL_PATH=""
+
class MyHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_GET(self):
+
+ # Strip the subdirectory from the PATH
+ # e.g. localhost:3000/subdirectory/packages/ohif_polyfill/svg4everybody.min.js
+ # is interpreted by this script as localhost:3000/packages/ohif_polyfill/svg4everybody.min.js
+ # so the file is found properly.
+ self.path = self.path.replace(URL_PATH, "")
# Parse query data to find out what was requested
parsedParams = urlparse.urlparse(self.path) |
7a4aeffc89120d0d5de53837a71f62ee21ba9bd6 | app/backend/wells/apps.py | app/backend/wells/apps.py | """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from django.apps import AppConfig
from django.db.models.signals import post_migrate
from django.db.models import Max
from gwells.db_comments import db_actions
logger = logging.getLogger(__name__)
def post_migration_callback(sender, **kwargs):
# Dynamic comments from models
db_actions.create_db_comments_from_models(db_actions.get_all_model_classes('wells.models'))
# NOTE: This is a temporary measure to reduce issues surrounding the well_tag_number sequece being
# incorrect after replication wells. This should be removed once we switch over to gwells for creating
# wells.
from wells.models import Well
from django.db import connection
result = Well.objects.all().aggregate(Max('well_tag_number'))
if result['well_tag_number__max']:
with connection.cursor() as cursor:
sql = "alter sequence well_well_tag_number_seq restart with {}".format(
result['well_tag_number__max'] + 1)
logger.info('altering well_well_tag_number_seq: {}'.format(sql))
cursor.execute(sql)
class WellsConfig(AppConfig):
name = 'wells'
def ready(self):
post_migrate.connect(post_migration_callback, sender=self)
| """
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from django.apps import AppConfig
from django.db.models.signals import post_migrate
from django.db.models import Max
from gwells.db_comments import db_actions
logger = logging.getLogger(__name__)
def post_migration_callback(sender, **kwargs):
# Dynamic comments from models
db_actions.create_db_comments_from_models(db_actions.get_all_model_classes('wells.models'))
class WellsConfig(AppConfig):
name = 'wells'
def ready(self):
post_migrate.connect(post_migration_callback, sender=self)
| Remove redundant post migrate step for increasing well_tag_number count (no longer needed, data is no longer being replicated) | Remove redundant post migrate step for increasing well_tag_number count (no longer needed, data is no longer being replicated)
| Python | apache-2.0 | bcgov/gwells,bcgov/gwells,bcgov/gwells,bcgov/gwells | ---
+++
@@ -27,20 +27,6 @@
# Dynamic comments from models
db_actions.create_db_comments_from_models(db_actions.get_all_model_classes('wells.models'))
- # NOTE: This is a temporary measure to reduce issues surrounding the well_tag_number sequece being
- # incorrect after replication wells. This should be removed once we switch over to gwells for creating
- # wells.
- from wells.models import Well
- from django.db import connection
-
- result = Well.objects.all().aggregate(Max('well_tag_number'))
- if result['well_tag_number__max']:
- with connection.cursor() as cursor:
- sql = "alter sequence well_well_tag_number_seq restart with {}".format(
- result['well_tag_number__max'] + 1)
- logger.info('altering well_well_tag_number_seq: {}'.format(sql))
- cursor.execute(sql)
-
class WellsConfig(AppConfig):
name = 'wells' |
d113fd75456c14f651cb9769d922d9394b369d63 | tools/add_previews.py | tools/add_previews.py | import os
import sys
import django
sys.path.append("/var/projects/museum/")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from museum_site.models import *
from museum_site.common import *
from museum_site.constants import *
def main():
articles = Article.objects.filter(preview="").order_by("id")
for a in articles:
path = os.path.join(
SITE_ROOT, "museum_site/static/images/articles/previews/{}.png"
)
preview_path = path.format(a.id)
if os.path.isfile(preview_path):
print("[X]", a.id, a.title, preview_path)
a.preview = "articles/previews/{}.png".format(a.id)
a.save()
else:
print("[ ]", a.id, a.title)
return True
if __name__ == "__main__":
main()
| import os
import sys
import django
sys.path.append("/var/projects/museum/")
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
from museum_site.models import * # noqa: E402
from museum_site.common import * # noqa: E402
from museum_site.constants import * # noqa: E402
HELP = """This script will list all articles without a preview image.
If a PNG matched the article's PK is in static/images/articles/previews it
will automatically be assigned.
Press ENTER to begin."""
def main():
input(HELP)
articles = Article.objects.filter(preview="").order_by("id")
for a in articles:
path = os.path.join(
SITE_ROOT, "museum_site/static/images/articles/previews/{}.png"
)
preview_path = path.format(a.id)
if os.path.isfile(preview_path):
print("[X]", a.id, a.title, preview_path)
a.preview = "articles/previews/{}.png".format(a.id)
a.save()
else:
print("[ ]", a.id, a.title)
print("Done.")
return True
if __name__ == "__main__":
main()
| Add script explanation and message when script is complete. | Add script explanation and message when script is complete.
| Python | mit | DrDos0016/z2,DrDos0016/z2,DrDos0016/z2 | ---
+++
@@ -7,13 +7,22 @@
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "museum.settings")
django.setup()
-from museum_site.models import *
-from museum_site.common import *
-from museum_site.constants import *
+from museum_site.models import * # noqa: E402
+from museum_site.common import * # noqa: E402
+from museum_site.constants import * # noqa: E402
+
+HELP = """This script will list all articles without a preview image.
+If a PNG matched the article's PK is in static/images/articles/previews it
+will automatically be assigned.
+
+Press ENTER to begin."""
def main():
+ input(HELP)
+
articles = Article.objects.filter(preview="").order_by("id")
+
for a in articles:
path = os.path.join(
SITE_ROOT, "museum_site/static/images/articles/previews/{}.png"
@@ -26,6 +35,7 @@
else:
print("[ ]", a.id, a.title)
+ print("Done.")
return True
|
878811a673625f9dbe0f41dd0196887f612ecf2e | expand_region_handler.py | expand_region_handler.py | import re
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=None):
if(re.compile("html|htm|xml").search(extension)):
return html.expand(string, start, end)
return javascript.expand(string, start, end) | import re
try:
import javascript
import html
except:
from . import javascript
from . import html
def expand(string, start, end, extension=""):
if(re.compile("html|htm|xml").search(extension)):
return html.expand(string, start, end)
return javascript.expand(string, start, end) | Set default file extension to empty string | Set default file extension to empty string
Because with a Bool the regex throws
| Python | mit | johyphenel/sublime-expand-region,johyphenel/sublime-expand-region,aronwoost/sublime-expand-region | ---
+++
@@ -7,7 +7,7 @@
from . import javascript
from . import html
-def expand(string, start, end, extension=None):
+def expand(string, start, end, extension=""):
if(re.compile("html|htm|xml").search(extension)):
return html.expand(string, start, end) |
582964f9da6029cd089117496babf9267c41ecd5 | evewspace/core/utils.py | evewspace/core/utils.py | # Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from core.models import ConfigEntry
def get_config(name, user):
"""
Gets the correct config value for the given key name.
Value with the given user has priority over any default value.
"""
if ConfigEntry.objects.filter(name=name, user=user).count() != 0:
return ConfigEntry.objects.get(name=name, user=user)
# No user value, look for global / default
if ConfigEntry.objects.filter(name=name, user=None).count() != 0:
return ConfigEntry.objects.get(name=name, user=None)
else:
raise KeyError("No configuration entry with key %s was found." % name)
| # Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from core.models import ConfigEntry
def get_config(name, user):
"""
Gets the correct config value for the given key name.
Value with the given user has priority over any default value.
"""
try:
return ConfigEntry.objects.get(name=name, user=user)
except ConfigEntry.DoesNotExist:
return ConfigEntry.objects.get(name=name, user=None)
| Reduce queries used to lookup config | Reduce queries used to lookup config
| Python | apache-2.0 | evewspace/eve-wspace,nyrocron/eve-wspace,hybrid1969/eve-wspace,hybrid1969/eve-wspace,acdervis/eve-wspace,marbindrakon/eve-wspace,Unsettled/eve-wspace,proycon/eve-wspace,mmalyska/eve-wspace,evewspace/eve-wspace,gpapaz/eve-wspace,acdervis/eve-wspace,proycon/eve-wspace,marbindrakon/eve-wspace,Zumochi/eve-wspace,marbindrakon/eve-wspace,gpapaz/eve-wspace,Unsettled/eve-wspace,gpapaz/eve-wspace,hybrid1969/eve-wspace,Unsettled/eve-wspace,nyrocron/eve-wspace,proycon/eve-wspace,Zumochi/eve-wspace,nyrocron/eve-wspace,Maarten28/eve-wspace,Maarten28/eve-wspace,mmalyska/eve-wspace,acdervis/eve-wspace,marbindrakon/eve-wspace,proycon/eve-wspace,mmalyska/eve-wspace,Maarten28/eve-wspace,mmalyska/eve-wspace,Unsettled/eve-wspace,Zumochi/eve-wspace,evewspace/eve-wspace,Maarten28/eve-wspace,acdervis/eve-wspace,Zumochi/eve-wspace,gpapaz/eve-wspace,evewspace/eve-wspace,hybrid1969/eve-wspace,nyrocron/eve-wspace | ---
+++
@@ -21,11 +21,7 @@
Gets the correct config value for the given key name.
Value with the given user has priority over any default value.
"""
- if ConfigEntry.objects.filter(name=name, user=user).count() != 0:
+ try:
return ConfigEntry.objects.get(name=name, user=user)
-
- # No user value, look for global / default
- if ConfigEntry.objects.filter(name=name, user=None).count() != 0:
+ except ConfigEntry.DoesNotExist:
return ConfigEntry.objects.get(name=name, user=None)
- else:
- raise KeyError("No configuration entry with key %s was found." % name) |
dcc2821cac0619fc2ca5f486ad30416f3c3cfda9 | ce/expr/parser.py | ce/expr/parser.py | #!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
def _parse_r(s):
s = s.strip()
bracket_level = 0
operator_pos = -1
for i, v in enumerate(s):
if v == '(':
bracket_level += 1
if v == ')':
bracket_level -= 1
if bracket_level == 1 and v in OPERATORS:
operator_pos = i
break
if operator_pos == -1:
return s
a1 = _parse_r(s[1:operator_pos].strip())
a2 = _parse_r(s[operator_pos + 1:-1].strip())
return Expr(s[operator_pos], a1, a2)
| #!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
def try_to_number(s):
try:
return mpq(s)
except (ValueError, TypeError):
return s
OPERATOR_MAP = {
ast.Add: ADD_OP,
ast.Mult: MULTIPLY_OP,
}
def parse(s):
from .biop import Expr
def _parse_r(t):
try:
return t.n
except AttributeError:
pass
try:
return t.id
except AttributeError:
op = OPERATOR_MAP[t.op.__class__]
a1 = _parse_r(t.left)
a2 = _parse_r(t.right)
return Expr(op, a1, a2)
return _parse_r(ast.parse(s, mode='eval').body)
| Replace parsing with Python's ast | Replace parsing with Python's ast
Allows greater flexibility and syntax checks
| Python | mit | admk/soap | ---
+++
@@ -1,6 +1,8 @@
#!/usr/bin/env python
# vim: set fileencoding=UTF-8 :
+
+import ast
from ..semantics import mpq
from .common import OPERATORS, ADD_OP, MULTIPLY_OP
@@ -13,20 +15,24 @@
return s
-def _parse_r(s):
- s = s.strip()
- bracket_level = 0
- operator_pos = -1
- for i, v in enumerate(s):
- if v == '(':
- bracket_level += 1
- if v == ')':
- bracket_level -= 1
- if bracket_level == 1 and v in OPERATORS:
- operator_pos = i
- break
- if operator_pos == -1:
- return s
- a1 = _parse_r(s[1:operator_pos].strip())
- a2 = _parse_r(s[operator_pos + 1:-1].strip())
- return Expr(s[operator_pos], a1, a2)
+OPERATOR_MAP = {
+ ast.Add: ADD_OP,
+ ast.Mult: MULTIPLY_OP,
+}
+
+
+def parse(s):
+ from .biop import Expr
+ def _parse_r(t):
+ try:
+ return t.n
+ except AttributeError:
+ pass
+ try:
+ return t.id
+ except AttributeError:
+ op = OPERATOR_MAP[t.op.__class__]
+ a1 = _parse_r(t.left)
+ a2 = _parse_r(t.right)
+ return Expr(op, a1, a2)
+ return _parse_r(ast.parse(s, mode='eval').body) |
920e75491f3aaa74980e11086cfebe911c2def4b | statsmodels/datasets/tests/test_data.py | statsmodels/datasets/tests/test_data.py | import numpy as np
import pandas as pd
import statsmodels.datasets as datasets
from statsmodels.datasets import co2
from statsmodels.datasets.utils import Dataset
def test_co2_python3():
# this failed in pd.to_datetime on Python 3 with pandas <= 0.12.0
dta = co2.load_pandas()
class TestDatasets(object):
@classmethod
def setup_class(cls):
exclude = ['check_internet', 'clear_data_home', 'get_data_home',
'get_rdataset', 'tests', 'utils', 'webuse']
cls.sets = []
for dataset_name in dir(datasets):
if not dataset_name.startswith('_') and dataset_name not in exclude:
cls.sets.append(dataset_name)
def check(self, dataset_name):
dataset = __import__('statsmodels.datasets.' + dataset_name, fromlist=[''])
data = dataset.load()
assert isinstance(data, Dataset)
assert isinstance(data.data, np.recarray)
df_data = dataset.load_pandas()
assert isinstance(data, Dataset)
assert isinstance(df_data.data, pd.DataFrame)
def test_all_datasets(self):
for dataset_name in self.sets:
yield (self.check, dataset_name)
| import importlib
import numpy as np
import pandas as pd
import nose
import pytest
import statsmodels.datasets
from statsmodels.datasets.utils import Dataset
exclude = ['check_internet', 'clear_data_home', 'get_data_home',
'get_rdataset', 'tests', 'utils', 'webuse']
datasets = []
for dataset_name in dir(statsmodels.datasets):
if not dataset_name.startswith('_') and dataset_name not in exclude:
datasets.append(dataset_name)
# TODO: Remove nottest when nose support is dropped
@nose.tools.nottest
@pytest.mark.parametrize('dataset_name', datasets)
def test_dataset(dataset_name):
dataset = importlib.import_module('statsmodels.datasets.' + dataset_name)
data = dataset.load()
assert isinstance(data, Dataset)
assert isinstance(data.data, np.recarray)
df_data = dataset.load_pandas()
assert isinstance(df_data, Dataset)
assert isinstance(df_data.data, pd.DataFrame)
# TODO: Remove when nose support is dropped
def test_all_datasets():
for dataset in datasets:
test_dataset(dataset)
| Remove yield from datasets tests | TST: Remove yield from datasets tests
Remove yield which is pending deprecation in pytest
xref #4000
| Python | bsd-3-clause | josef-pkt/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,jseabold/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,bashtage/statsmodels,jseabold/statsmodels,bashtage/statsmodels,ChadFulton/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,jseabold/statsmodels | ---
+++
@@ -1,37 +1,36 @@
+import importlib
+
import numpy as np
import pandas as pd
+import nose
+import pytest
-import statsmodels.datasets as datasets
-from statsmodels.datasets import co2
+import statsmodels.datasets
from statsmodels.datasets.utils import Dataset
-
-def test_co2_python3():
- # this failed in pd.to_datetime on Python 3 with pandas <= 0.12.0
- dta = co2.load_pandas()
+exclude = ['check_internet', 'clear_data_home', 'get_data_home',
+ 'get_rdataset', 'tests', 'utils', 'webuse']
+datasets = []
+for dataset_name in dir(statsmodels.datasets):
+ if not dataset_name.startswith('_') and dataset_name not in exclude:
+ datasets.append(dataset_name)
-class TestDatasets(object):
+# TODO: Remove nottest when nose support is dropped
+@nose.tools.nottest
+@pytest.mark.parametrize('dataset_name', datasets)
+def test_dataset(dataset_name):
+ dataset = importlib.import_module('statsmodels.datasets.' + dataset_name)
+ data = dataset.load()
+ assert isinstance(data, Dataset)
+ assert isinstance(data.data, np.recarray)
- @classmethod
- def setup_class(cls):
- exclude = ['check_internet', 'clear_data_home', 'get_data_home',
- 'get_rdataset', 'tests', 'utils', 'webuse']
- cls.sets = []
- for dataset_name in dir(datasets):
- if not dataset_name.startswith('_') and dataset_name not in exclude:
- cls.sets.append(dataset_name)
+ df_data = dataset.load_pandas()
+ assert isinstance(df_data, Dataset)
+ assert isinstance(df_data.data, pd.DataFrame)
- def check(self, dataset_name):
- dataset = __import__('statsmodels.datasets.' + dataset_name, fromlist=[''])
- data = dataset.load()
- assert isinstance(data, Dataset)
- assert isinstance(data.data, np.recarray)
- df_data = dataset.load_pandas()
- assert isinstance(data, Dataset)
- assert isinstance(df_data.data, pd.DataFrame)
-
- def test_all_datasets(self):
- for dataset_name in self.sets:
- yield (self.check, dataset_name)
+# TODO: Remove when nose support is dropped
+def test_all_datasets():
+ for dataset in datasets:
+ test_dataset(dataset) |
97badc176f4a8ac30eb3932359e2e132e36170c4 | docker/gunicorn_config.py | docker/gunicorn_config.py | import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 2
timeout = 60
threads = multiprocessing.cpu_count() * 2
max_requests = 1000
max_requests_jitter = 5
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
errorlog = '/var/log/gunicorn/gunicorn-error.log'
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except:
pass
| import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
workers = multiprocessing.cpu_count() * 3
timeout = 60
threads = multiprocessing.cpu_count() * 3
max_requests = 500
max_requests_jitter = 5
# Read the DEBUG setting from env var
try:
if getenv('DOCKER_SAL_DEBUG').lower() == 'true':
errorlog = '/var/log/gunicorn/gunicorn-error.log'
accesslog = '/var/log/gunicorn/gunicorn-access.log'
loglevel = 'info'
except:
pass
| Increase the number of workers | Increase the number of workers
| Python | apache-2.0 | sheagcraig/sal,chasetb/sal,sheagcraig/sal,erikng/sal,salopensource/sal,salopensource/sal,chasetb/sal,erikng/sal,chasetb/sal,erikng/sal,chasetb/sal,erikng/sal,sheagcraig/sal,salopensource/sal,salopensource/sal,sheagcraig/sal | ---
+++
@@ -1,10 +1,10 @@
import multiprocessing
from os import getenv
bind = '127.0.0.1:8001'
-workers = multiprocessing.cpu_count() * 2
+workers = multiprocessing.cpu_count() * 3
timeout = 60
-threads = multiprocessing.cpu_count() * 2
-max_requests = 1000
+threads = multiprocessing.cpu_count() * 3
+max_requests = 500
max_requests_jitter = 5
# Read the DEBUG setting from env var
try: |
15090b84e1c7359c49cb45aec4d9b4d492f855ac | tests/scoring_engine/engine/checks/test_smb.py | tests/scoring_engine/engine/checks/test_smb.py | from scoring_engine.engine.basic_check import CHECKS_BIN_PATH
from tests.scoring_engine.engine.checks.check_test import CheckTest
class TestSMBCheck(CheckTest):
check_name = 'SMBCheck'
required_properties = ['share', 'file', 'hash']
properties = {
'share': 'ScoringShare',
'file': 'flag.txt',
'hash': '123456789'
}
accounts = {
'pwnbus': 'pwnbuspass'
}
cmd = CHECKS_BIN_PATH + "/smb_check --host '127.0.0.1' --user 'pwnbus' --pass 'pwnbuspass' --share 'ScoringShare' --file 'flag.txt' --hash '123456789'"
| from scoring_engine.engine.basic_check import CHECKS_BIN_PATH
from tests.scoring_engine.engine.checks.check_test import CheckTest
class TestSMBCheck(CheckTest):
check_name = 'SMBCheck'
required_properties = ['share', 'file', 'hash']
properties = {
'share': 'ScoringShare',
'file': 'flag.txt',
'hash': '123456789'
}
accounts = {
'pwnbus': 'pwnbuspass'
}
cmd = CHECKS_BIN_PATH + "/smb_check --host '127.0.0.1' --port 100 --user 'pwnbus' --pass 'pwnbuspass' --share 'ScoringShare' --file 'flag.txt' --hash '123456789'"
| Update smb test to include port parameter | Update smb test to include port parameter
| Python | mit | pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine | ---
+++
@@ -14,4 +14,4 @@
accounts = {
'pwnbus': 'pwnbuspass'
}
- cmd = CHECKS_BIN_PATH + "/smb_check --host '127.0.0.1' --user 'pwnbus' --pass 'pwnbuspass' --share 'ScoringShare' --file 'flag.txt' --hash '123456789'"
+ cmd = CHECKS_BIN_PATH + "/smb_check --host '127.0.0.1' --port 100 --user 'pwnbus' --pass 'pwnbuspass' --share 'ScoringShare' --file 'flag.txt' --hash '123456789'" |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.