commit
stringlengths 40
40
| old_file
stringlengths 4
150
| new_file
stringlengths 4
150
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
501
| message
stringlengths 15
4.06k
| lang
stringclasses 4
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
| diff
stringlengths 0
4.35k
|
|---|---|---|---|---|---|---|---|---|---|---|
cee76d5c2216f6f42a54b28a753fe288accc40d7
|
corehq/apps/accounting/migrations/0026_auto_20180508_1956.py
|
corehq/apps/accounting/migrations/0026_auto_20180508_1956.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-08 19:56
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import HqRunPython
def noop(*args, **kwargs):
pass
def _convert_emailed_to_array_field(apps, schema_editor):
BillingRecord = apps.get_model('accounting', 'BillingRecord')
for record in BillingRecord.objects.all():
if record.emailed_to != '':
record.emailed_to_list = record.emailed_to.split(',')
WireBillingRecord = apps.get_model('accounting', 'WireBillingRecord')
for wirerecord in WireBillingRecord.objects.all():
if wirerecord.emailed_to != '':
wirerecord.emailed_to_list = wirerecord.emailed_to.split(',')
class Migration(migrations.Migration):
dependencies = [
('accounting', '0025_auto_20180508_1952'),
]
operations = [
HqRunPython(_convert_emailed_to_array_field, reverse_code=noop)
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-05-08 19:56
from __future__ import unicode_literals
from __future__ import absolute_import
from django.db import migrations
from corehq.sql_db.operations import HqRunPython
def noop(*args, **kwargs):
pass
def _convert_emailed_to_array_field(apps, schema_editor):
BillingRecord = apps.get_model('accounting', 'BillingRecord')
for record in BillingRecord.objects.all():
if record.emailed_to != '':
record.emailed_to_list = record.emailed_to.split(',')
record.save()
WireBillingRecord = apps.get_model('accounting', 'WireBillingRecord')
for wirerecord in WireBillingRecord.objects.all():
if wirerecord.emailed_to != '':
wirerecord.emailed_to_list = wirerecord.emailed_to.split(',')
wirerecord.save()
class Migration(migrations.Migration):
dependencies = [
('accounting', '0025_auto_20180508_1952'),
]
operations = [
HqRunPython(_convert_emailed_to_array_field, reverse_code=noop)
]
|
Save record in migrations file
|
Save record in migrations file
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
---
+++
@@ -16,11 +16,13 @@
for record in BillingRecord.objects.all():
if record.emailed_to != '':
record.emailed_to_list = record.emailed_to.split(',')
+ record.save()
WireBillingRecord = apps.get_model('accounting', 'WireBillingRecord')
for wirerecord in WireBillingRecord.objects.all():
if wirerecord.emailed_to != '':
wirerecord.emailed_to_list = wirerecord.emailed_to.split(',')
+ wirerecord.save()
class Migration(migrations.Migration):
|
f646fb88e793897cf827fd7d9484386a4bb08594
|
pages/tests/test_serializers.py
|
pages/tests/test_serializers.py
|
from mock import patch
from django.test import TestCase
from rest_framework.reverse import reverse
from .. import serializers
from . import factories
from pages.utils import build_url
class PageSerializerTest(TestCase):
def expected_data(self, page):
expected = {
'id': page.pk,
'url': page.get_absolute_url(),
'name': page.name,
'slug': page.slug,
'regions': page.rendered_regions(),
}
return expected
def test_serialize(self):
page = factories.PageFactory.create()
serializer = serializers.PageSerializer(page)
self.assertEqual(serializer.data, self.expected_data(page))
class GroupTest(TestCase):
url_path = 'pages.models.Group.get_absolute_url'
mocked_url = '/mocked_url'
def expected_data(self, group):
slug = group.slug
return {
'url': self.mocked_url,
'slug': slug,
'links': {
'pages': build_url(reverse('pages:page-list'), {'group': slug}),
},
}
@patch(url_path)
def test_serialize(self, group_url):
group_url.return_value = self.mocked_url
group = factories.GroupFactory.create()
serializer = serializers.GroupSerializer(group)
self.assertEqual(serializer.data, self.expected_data(group))
|
from rest_framework.reverse import reverse
from user_management.models.tests.utils import APIRequestTestCase
from .. import serializers
from . import factories
from pages.utils import build_url
class PageSerializerTest(APIRequestTestCase):
def setUp(self):
self.request = self.create_request()
self.context = {'request': self.request}
def expected_data(self, page):
expected = {
'id': page.pk,
'url': page.get_absolute_url(self.request),
'name': page.name,
'slug': page.slug,
'regions': page.rendered_regions(),
}
return expected
def test_serialize(self):
page = factories.PageFactory.create()
serializer = serializers.PageSerializer(page, context=self.context)
self.assertEqual(serializer.data, self.expected_data(page))
class GroupTest(APIRequestTestCase):
def setUp(self):
self.request = self.create_request()
self.context = {'request': self.request}
def expected_data(self, group):
slug = group.slug
return {
'url': group.get_absolute_url(self.request),
'slug': slug,
'links': {
'pages': build_url(
reverse('pages:page-list', request=self.request),
{'group': slug},
),
},
}
def test_serialize(self):
group = factories.GroupFactory.create()
serializer = serializers.GroupSerializer(group, context=self.context)
self.assertEqual(serializer.data, self.expected_data(group))
|
Fix DeprecationWarnings for serializer context
|
Fix DeprecationWarnings for serializer context
* Pass a context containing a request to every serializer
Conflicts:
pages/tests/test_serializers.py
|
Python
|
bsd-2-clause
|
incuna/feincms-pages-api
|
---
+++
@@ -1,18 +1,20 @@
-from mock import patch
-
-from django.test import TestCase
from rest_framework.reverse import reverse
+from user_management.models.tests.utils import APIRequestTestCase
from .. import serializers
from . import factories
from pages.utils import build_url
-class PageSerializerTest(TestCase):
+class PageSerializerTest(APIRequestTestCase):
+ def setUp(self):
+ self.request = self.create_request()
+ self.context = {'request': self.request}
+
def expected_data(self, page):
expected = {
'id': page.pk,
- 'url': page.get_absolute_url(),
+ 'url': page.get_absolute_url(self.request),
'name': page.name,
'slug': page.slug,
'regions': page.rendered_regions(),
@@ -21,28 +23,30 @@
def test_serialize(self):
page = factories.PageFactory.create()
- serializer = serializers.PageSerializer(page)
+ serializer = serializers.PageSerializer(page, context=self.context)
self.assertEqual(serializer.data, self.expected_data(page))
-class GroupTest(TestCase):
- url_path = 'pages.models.Group.get_absolute_url'
- mocked_url = '/mocked_url'
+class GroupTest(APIRequestTestCase):
+ def setUp(self):
+ self.request = self.create_request()
+ self.context = {'request': self.request}
def expected_data(self, group):
slug = group.slug
return {
- 'url': self.mocked_url,
+ 'url': group.get_absolute_url(self.request),
'slug': slug,
'links': {
- 'pages': build_url(reverse('pages:page-list'), {'group': slug}),
+ 'pages': build_url(
+ reverse('pages:page-list', request=self.request),
+ {'group': slug},
+ ),
},
}
- @patch(url_path)
- def test_serialize(self, group_url):
- group_url.return_value = self.mocked_url
+ def test_serialize(self):
group = factories.GroupFactory.create()
- serializer = serializers.GroupSerializer(group)
+ serializer = serializers.GroupSerializer(group, context=self.context)
self.assertEqual(serializer.data, self.expected_data(group))
|
cf9b1805b55c567c5f5365d3f2f51f119304e6ba
|
AC_tools/__init__.py
|
AC_tools/__init__.py
|
# compatibility with both python 2 and 3
from __future__ import print_function
from . plotting_REDUNDENT import *
from . plotting import *
from . variables import *
from . AC_time import *
from . planeflight import *
from . generic import *
from . core import *
from . GEOSChem_bpch import *
from . GEOSChem_nc import *
import numpy as np
"""
AC_tools is a module of functions started by Tomas, and contributed to by others in the Evans' group, and hopefully maintained by the Group.
To access the help, from python or ipython, type help(AC_tools) to get general help
To get more detailed help from a module for example, type help(AC_tools.AC_time.py)
If you find missing documentation any thing is unclear in any of this, please request a git push to github.
"""
# Setup logging for module
import logging
level = logging.DEBUG
FORMAT = "%(levelname)8s - %(message)s @---> %(filename)s:%(lineno)s %(funcName)s()"
logging.basicConfig(filename='AC_tools.log', filemode='w', level=level,
format=FORMAT)
logging.getLogger().setLevel(level)
# Import submodules here for easier access
|
# compatibility with both python 2 and 3
from __future__ import print_function
import numpy as np
# AC_tools modules
from . AC_time import *
from . core import *
from . generic import *
from . GEOSChem_bpch import *
from . GEOSChem_nc import *
from . KPP import *
from . planeflight import *
from . plotting_REDUNDENT import *
from . plotting import *
from . SMVGEAR import *
from . variables import *
"""
AC_tools is a module of functions started by Tomas, and contributed to by others in the Evans' group, and hopefully maintained by the Group.
To access the help, from python or ipython, type help(AC_tools) to get general help
To get more detailed help from a module for example, type help(AC_tools.AC_time.py)
If you find missing documentation any thing is unclear in any of this, please request a git push to github.
"""
# Setup logging for module
import logging
level = logging.DEBUG
FORMAT = "%(levelname)8s - %(message)s @---> %(filename)s:%(lineno)s %(funcName)s()"
logging.basicConfig(filename='AC_tools.log', filemode='w', level=level,
format=FORMAT)
logging.getLogger().setLevel(level)
# Import submodules here for easier access
|
Include integrator modules for KPP and SMVGEAR
|
Include integrator modules for KPP and SMVGEAR
|
Python
|
mit
|
tsherwen/AC_tools,tsherwen/AC_tools
|
---
+++
@@ -1,15 +1,19 @@
# compatibility with both python 2 and 3
from __future__ import print_function
+import numpy as np
+# AC_tools modules
+from . AC_time import *
+from . core import *
+from . generic import *
+from . GEOSChem_bpch import *
+from . GEOSChem_nc import *
+from . KPP import *
+from . planeflight import *
from . plotting_REDUNDENT import *
from . plotting import *
+from . SMVGEAR import *
from . variables import *
-from . AC_time import *
-from . planeflight import *
-from . generic import *
-from . core import *
-from . GEOSChem_bpch import *
-from . GEOSChem_nc import *
-import numpy as np
+
"""
AC_tools is a module of functions started by Tomas, and contributed to by others in the Evans' group, and hopefully maintained by the Group.
To access the help, from python or ipython, type help(AC_tools) to get general help
|
419131bba11cab27c36ef2b21199cdc3540cde16
|
byceps/services/shop/order/actions/revoke_ticket_bundles.py
|
byceps/services/shop/order/actions/revoke_ticket_bundles.py
|
"""
byceps.services.shop.order.actions.revoke_ticket_bundles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from .....typing import UserID
from ..transfer.action import ActionParameters
from ..transfer.order import Order
from . import ticket
def revoke_ticket_bundles(
order: Order,
bundle_quantity: int,
initiator_id: UserID,
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
ticket.revoke_ticket_bundles(order, initiator_id)
|
"""
byceps.services.shop.order.actions.revoke_ticket_bundles
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2022 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from .....typing import UserID
from ..transfer.action import ActionParameters
from ..transfer.order import Order
from . import ticket_bundle
def revoke_ticket_bundles(
order: Order,
bundle_quantity: int,
initiator_id: UserID,
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
ticket_bundle.revoke_ticket_bundles(order, initiator_id)
|
Fix ticket bundle revocation order action
|
Fix ticket bundle revocation order action
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
---
+++
@@ -11,7 +11,7 @@
from ..transfer.action import ActionParameters
from ..transfer.order import Order
-from . import ticket
+from . import ticket_bundle
def revoke_ticket_bundles(
@@ -21,4 +21,4 @@
parameters: ActionParameters,
) -> None:
"""Revoke all ticket bundles in this order."""
- ticket.revoke_ticket_bundles(order, initiator_id)
+ ticket_bundle.revoke_ticket_bundles(order, initiator_id)
|
ece899be682b8713c27e5a4d6189c09afb722c5c
|
partner_firstname/migrations/12.0.1.0.0/pre-ir_config_param.py
|
partner_firstname/migrations/12.0.1.0.0/pre-ir_config_param.py
|
def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE name = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
|
def store_ir_config_param(cr):
"""Prior to version 12.0 the default order of partner
names was last_first. In order to retain this behaviour we
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
"WHERE key = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
def migrate(cr, version):
store_ir_config_param(cr)
|
Fix migration script 12.0.1.0.0 of partner_firstname
|
[12.0] Fix migration script 12.0.1.0.0 of partner_firstname
|
Python
|
agpl-3.0
|
BT-rmartin/partner-contact,BT-rmartin/partner-contact,OCA/partner-contact,OCA/partner-contact
|
---
+++
@@ -4,7 +4,7 @@
store the config parameter if it is not present.
"""
cr.execute("SELECT 1 FROM ir_config_parameter "
- "WHERE name = 'partner_names_order'")
+ "WHERE key = 'partner_names_order'")
if not cr.fetchone():
cr.execute("INSERT INTO ir_config_parameter (key, value) VALUES "
"('partner_names_order', 'last_first')")
|
83d45c0fa64da347eec6b96f46c5eb1fbfe516d4
|
plugins/call_bad_permissions.py
|
plugins/call_bad_permissions.py
|
# -*- coding:utf-8 -*-
#
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import bandit
import stat
from bandit.test_selector import *
@checks_functions
def call_bad_permissions(context):
if 'chmod' in context.call_function_name:
if context.call_args_count == 2:
mode = context.get_call_arg_at_position(1)
if mode is not None and (mode & stat.S_IWOTH or mode & stat.S_IXGRP):
filename = context.get_call_arg_at_position(0)
if filename is None:
filename = 'NOT PARSED'
return(bandit.ERROR, 'Chmod setting a permissive mask %s on '
'file (%s).' % (oct(mode), filename))
|
# -*- coding:utf-8 -*-
#
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import bandit
import stat
from bandit.test_selector import *
@checks_functions
def call_bad_permissions(context):
if 'chmod' in context.call_function_name:
if context.call_args_count == 2:
mode = context.get_call_arg_at_position(1)
if(mode is not None and type(mode) == int and
(mode & stat.S_IWOTH or mode & stat.S_IXGRP)):
filename = context.get_call_arg_at_position(0)
if filename is None:
filename = 'NOT PARSED'
return(bandit.ERROR, 'Chmod setting a permissive mask %s on '
'file (%s).' % (oct(mode), filename))
|
Fix bug with permissions matching
|
Fix bug with permissions matching
|
Python
|
apache-2.0
|
chair6/bandit,stackforge/bandit,austin987/bandit,pombredanne/bandit,stackforge/bandit,pombredanne/bandit
|
---
+++
@@ -24,7 +24,8 @@
if context.call_args_count == 2:
mode = context.get_call_arg_at_position(1)
- if mode is not None and (mode & stat.S_IWOTH or mode & stat.S_IXGRP):
+ if(mode is not None and type(mode) == int and
+ (mode & stat.S_IWOTH or mode & stat.S_IXGRP)):
filename = context.get_call_arg_at_position(0)
if filename is None:
filename = 'NOT PARSED'
|
99cbd692ae6b7dab65aae9d77fd2d8333fe075e4
|
Lib/scipy_version.py
|
Lib/scipy_version.py
|
major = 0
minor = 3
micro = 3
#release_level = 'alpha'
release_level=''
from __cvs_version__ import cvs_version
cvs_minor = cvs_version[-3]
cvs_serial = cvs_version[-1]
if release_level:
scipy_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\
'_%(cvs_minor)d.%(cvs_serial)d' % (locals ())
else:
scipy_version = '%(major)d.%(minor)d.%(micro)d'\
'_%(cvs_minor)d.%(cvs_serial)d' % (locals ())
|
major = 0
minor = 3
micro = 2
#release_level = 'alpha'
release_level=''
try:
from __cvs_version__ import cvs_version
cvs_minor = cvs_version[-3]
cvs_serial = cvs_version[-1]
except ImportError,msg:
print msg
cvs_minor = 0
cvs_serial = 0
if cvs_minor or cvs_serial:
if release_level:
scipy_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\
'_%(cvs_minor)d.%(cvs_serial)d' % (locals ())
else:
scipy_version = '%(major)d.%(minor)d.%(micro)d'\
'_%(cvs_minor)d.%(cvs_serial)d' % (locals ())
else:
if release_level:
scipy_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\
% (locals ())
else:
scipy_version = '%(major)d.%(minor)d.%(micro)d'\
% (locals ())
|
Handle missing __cvs_version__.py in a branch.
|
Handle missing __cvs_version__.py in a branch.
|
Python
|
bsd-3-clause
|
gef756/scipy,aman-iitj/scipy,jakevdp/scipy,efiring/scipy,Eric89GXL/scipy,WarrenWeckesser/scipy,zerothi/scipy,argriffing/scipy,futurulus/scipy,juliantaylor/scipy,tylerjereddy/scipy,fernand/scipy,ndchorley/scipy,fredrikw/scipy,niknow/scipy,nmayorov/scipy,nmayorov/scipy,Gillu13/scipy,WarrenWeckesser/scipy,mdhaber/scipy,witcxc/scipy,Kamp9/scipy,vanpact/scipy,piyush0609/scipy,aarchiba/scipy,jamestwebber/scipy,gertingold/scipy,scipy/scipy,WarrenWeckesser/scipy,mortonjt/scipy,trankmichael/scipy,perimosocordiae/scipy,zxsted/scipy,pbrod/scipy,juliantaylor/scipy,chatcannon/scipy,richardotis/scipy,vanpact/scipy,cpaulik/scipy,FRidh/scipy,behzadnouri/scipy,arokem/scipy,niknow/scipy,zaxliu/scipy,andyfaff/scipy,newemailjdm/scipy,apbard/scipy,jsilter/scipy,e-q/scipy,FRidh/scipy,Newman101/scipy,kalvdans/scipy,zxsted/scipy,fernand/scipy,befelix/scipy,endolith/scipy,ndchorley/scipy,scipy/scipy,perimosocordiae/scipy,person142/scipy,zaxliu/scipy,trankmichael/scipy,rmcgibbo/scipy,kleskjr/scipy,mtrbean/scipy,teoliphant/scipy,e-q/scipy,Eric89GXL/scipy,pizzathief/scipy,jonycgn/scipy,woodscn/scipy,rmcgibbo/scipy,futurulus/scipy,ales-erjavec/scipy,minhlongdo/scipy,newemailjdm/scipy,zxsted/scipy,andim/scipy,sriki18/scipy,trankmichael/scipy,lhilt/scipy,mdhaber/scipy,vhaasteren/scipy,vberaudi/scipy,maciejkula/scipy,endolith/scipy,ales-erjavec/scipy,Gillu13/scipy,raoulbq/scipy,jseabold/scipy,felipebetancur/scipy,endolith/scipy,jamestwebber/scipy,efiring/scipy,nmayorov/scipy,teoliphant/scipy,mikebenfield/scipy,pyramania/scipy,person142/scipy,larsmans/scipy,matthew-brett/scipy,Stefan-Endres/scipy,lukauskas/scipy,WarrenWeckesser/scipy,Srisai85/scipy,vanpact/scipy,mdhaber/scipy,jakevdp/scipy,hainm/scipy,mgaitan/scipy,sonnyhu/scipy,grlee77/scipy,WarrenWeckesser/scipy,hainm/scipy,jonycgn/scipy,befelix/scipy,mhogg/scipy,niknow/scipy,pbrod/scipy,mgaitan/scipy,hainm/scipy,behzadnouri/scipy,matthewalbani/scipy,Shaswat27/scipy,vhaasteren/scipy,richardotis/scipy,pschella/scipy,pschella/scipy,cpaulik/scipy,fe
rnand/scipy,aarchiba/scipy,haudren/scipy,nonhermitian/scipy,vigna/scipy,behzadnouri/scipy,teoliphant/scipy,rgommers/scipy,jonycgn/scipy,endolith/scipy,arokem/scipy,ogrisel/scipy,felipebetancur/scipy,petebachant/scipy,sargas/scipy,witcxc/scipy,giorgiop/scipy,lukauskas/scipy,richardotis/scipy,trankmichael/scipy,andyfaff/scipy,vhaasteren/scipy,Dapid/scipy,argriffing/scipy,mgaitan/scipy,Shaswat27/scipy,ndchorley/scipy,vhaasteren/scipy,ogrisel/scipy,zerothi/scipy,mingwpy/scipy,jakevdp/scipy,anntzer/scipy,Gillu13/scipy,sonnyhu/scipy,teoliphant/scipy,WillieMaddox/scipy,kleskjr/scipy,andyfaff/scipy,matthewalbani/scipy,jsilter/scipy,sriki18/scipy,Eric89GXL/scipy,raoulbq/scipy,haudren/scipy,pbrod/scipy,giorgiop/scipy,juliantaylor/scipy,Dapid/scipy,gdooper/scipy,dch312/scipy,piyush0609/scipy,larsmans/scipy,sauliusl/scipy,matthewalbani/scipy,jakevdp/scipy,person142/scipy,vanpact/scipy,jseabold/scipy,woodscn/scipy,Srisai85/scipy,cpaulik/scipy,Stefan-Endres/scipy,witcxc/scipy,raoulbq/scipy,newemailjdm/scipy,mikebenfield/scipy,dch312/scipy,mgaitan/scipy,sargas/scipy,andim/scipy,piyush0609/scipy,bkendzior/scipy,Gillu13/scipy,Kamp9/scipy,nvoron23/scipy,chatcannon/scipy,scipy/scipy,e-q/scipy,ortylp/scipy,gfyoung/scipy,jonycgn/scipy,zerothi/scipy,ndchorley/scipy,josephcslater/scipy,minhlongdo/scipy,jsilter/scipy,FRidh/scipy,hainm/scipy,jor-/scipy,gfyoung/scipy,Shaswat27/scipy,sargas/scipy,petebachant/scipy,sauliusl/scipy,endolith/scipy,lukauskas/scipy,arokem/scipy,minhlongdo/scipy,aeklant/scipy,mgaitan/scipy,Stefan-Endres/scipy,perimosocordiae/scipy,Srisai85/scipy,WarrenWeckesser/scipy,minhlongdo/scipy,njwilson23/scipy,njwilson23/scipy,andyfaff/scipy,sonnyhu/scipy,andyfaff/scipy,mortada/scipy,matthew-brett/scipy,mhogg/scipy,aman-iitj/scipy,cpaulik/scipy,matthewalbani/scipy,haudren/scipy,befelix/scipy,jsilter/scipy,matthew-brett/scipy,pizzathief/scipy,Newman101/scipy,ogrisel/scipy,fredrikw/scipy,Stefan-Endres/scipy,person142/scipy,gfyoung/scipy,maciejkula/scipy,gef756/scipy,mtrbean/sci
py,Kamp9/scipy,andim/scipy,Stefan-Endres/scipy,surhudm/scipy,fredrikw/scipy,anntzer/scipy,rgommers/scipy,pnedunuri/scipy,mtrbean/scipy,sriki18/scipy,befelix/scipy,maniteja123/scipy,vigna/scipy,zerothi/scipy,tylerjereddy/scipy,pschella/scipy,apbard/scipy,mdhaber/scipy,bkendzior/scipy,ogrisel/scipy,ChanderG/scipy,ortylp/scipy,gertingold/scipy,vigna/scipy,lukauskas/scipy,Newman101/scipy,juliantaylor/scipy,grlee77/scipy,gdooper/scipy,anielsen001/scipy,gef756/scipy,anntzer/scipy,WillieMaddox/scipy,zerothi/scipy,josephcslater/scipy,giorgiop/scipy,ilayn/scipy,dch312/scipy,futurulus/scipy,sauliusl/scipy,petebachant/scipy,Srisai85/scipy,josephcslater/scipy,WillieMaddox/scipy,WillieMaddox/scipy,vanpact/scipy,dominicelse/scipy,mhogg/scipy,zaxliu/scipy,tylerjereddy/scipy,bkendzior/scipy,ChanderG/scipy,jor-/scipy,ogrisel/scipy,mingwpy/scipy,vanpact/scipy,jamestwebber/scipy,lukauskas/scipy,pyramania/scipy,scipy/scipy,Srisai85/scipy,maniteja123/scipy,mortonjt/scipy,lukauskas/scipy,ChanderG/scipy,maciejkula/scipy,kleskjr/scipy,njwilson23/scipy,sauliusl/scipy,jseabold/scipy,sriki18/scipy,aarchiba/scipy,vberaudi/scipy,ilayn/scipy,argriffing/scipy,sargas/scipy,argriffing/scipy,tylerjereddy/scipy,vberaudi/scipy,jsilter/scipy,jjhelmus/scipy,andim/scipy,mtrbean/scipy,chatcannon/scipy,giorgiop/scipy,nvoron23/scipy,grlee77/scipy,josephcslater/scipy,mortada/scipy,pnedunuri/scipy,mdhaber/scipy,mdhaber/scipy,piyush0609/scipy,kalvdans/scipy,raoulbq/scipy,nmayorov/scipy,rgommers/scipy,aman-iitj/scipy,rmcgibbo/scipy,nonhermitian/scipy,anielsen001/scipy,vigna/scipy,WillieMaddox/scipy,Dapid/scipy,gdooper/scipy,mhogg/scipy,petebachant/scipy,piyush0609/scipy,ales-erjavec/scipy,ilayn/scipy,arokem/scipy,felipebetancur/scipy,maniteja123/scipy,ndchorley/scipy,gef756/scipy,dominicelse/scipy,andim/scipy,surhudm/scipy,futurulus/scipy,gef756/scipy,person142/scipy,raoulbq/scipy,pschella/scipy,zaxliu/scipy,zaxliu/scipy,niknow/scipy,aeklant/scipy,richardotis/scipy,vhaasteren/scipy,aman-iitj/scipy,mortada/scipy
,mortonjt/scipy,behzadnouri/scipy,bkendzior/scipy,mortonjt/scipy,mortonjt/scipy,andyfaff/scipy,giorgiop/scipy,witcxc/scipy,zaxliu/scipy,apbard/scipy,Dapid/scipy,pbrod/scipy,maniteja123/scipy,pizzathief/scipy,Eric89GXL/scipy,rgommers/scipy,woodscn/scipy,mingwpy/scipy,ndchorley/scipy,aarchiba/scipy,chatcannon/scipy,pbrod/scipy,Kamp9/scipy,larsmans/scipy,apbard/scipy,Dapid/scipy,ales-erjavec/scipy,dch312/scipy,jor-/scipy,trankmichael/scipy,mingwpy/scipy,fredrikw/scipy,zxsted/scipy,arokem/scipy,jjhelmus/scipy,Newman101/scipy,fredrikw/scipy,aman-iitj/scipy,Shaswat27/scipy,Stefan-Endres/scipy,nonhermitian/scipy,larsmans/scipy,hainm/scipy,lhilt/scipy,nonhermitian/scipy,argriffing/scipy,kleskjr/scipy,sargas/scipy,pnedunuri/scipy,grlee77/scipy,felipebetancur/scipy,Eric89GXL/scipy,pyramania/scipy,mortonjt/scipy,cpaulik/scipy,mhogg/scipy,dominicelse/scipy,mtrbean/scipy,maciejkula/scipy,kalvdans/scipy,woodscn/scipy,mikebenfield/scipy,nvoron23/scipy,ChanderG/scipy,jjhelmus/scipy,ilayn/scipy,pnedunuri/scipy,futurulus/scipy,witcxc/scipy,dch312/scipy,gef756/scipy,efiring/scipy,cpaulik/scipy,sriki18/scipy,njwilson23/scipy,Shaswat27/scipy,mortada/scipy,bkendzior/scipy,jseabold/scipy,e-q/scipy,newemailjdm/scipy,endolith/scipy,behzadnouri/scipy,efiring/scipy,anielsen001/scipy,mikebenfield/scipy,pyramania/scipy,futurulus/scipy,Newman101/scipy,njwilson23/scipy,anntzer/scipy,matthewalbani/scipy,gdooper/scipy,fernand/scipy,mtrbean/scipy,perimosocordiae/scipy,kalvdans/scipy,raoulbq/scipy,haudren/scipy,woodscn/scipy,richardotis/scipy,FRidh/scipy,Kamp9/scipy,efiring/scipy,mingwpy/scipy,jamestwebber/scipy,nvoron23/scipy,pnedunuri/scipy,Newman101/scipy,mhogg/scipy,perimosocordiae/scipy,pizzathief/scipy,maniteja123/scipy,ales-erjavec/scipy,mgaitan/scipy,haudren/scipy,josephcslater/scipy,WillieMaddox/scipy,andim/scipy,giorgiop/scipy,gertingold/scipy,nvoron23/scipy,gfyoung/scipy,zerothi/scipy,nonhermitian/scipy,sauliusl/scipy,ales-erjavec/scipy,grlee77/scipy,rmcgibbo/scipy,pyramania/scipy,aeklant/
scipy,ortylp/scipy,felipebetancur/scipy,argriffing/scipy,matthew-brett/scipy,njwilson23/scipy,maciejkula/scipy,ilayn/scipy,jonycgn/scipy,sonnyhu/scipy,sriki18/scipy,vhaasteren/scipy,sonnyhu/scipy,petebachant/scipy,dominicelse/scipy,trankmichael/scipy,piyush0609/scipy,gfyoung/scipy,maniteja123/scipy,befelix/scipy,ChanderG/scipy,jonycgn/scipy,larsmans/scipy,rmcgibbo/scipy,jor-/scipy,mikebenfield/scipy,surhudm/scipy,pschella/scipy,teoliphant/scipy,Gillu13/scipy,mingwpy/scipy,aarchiba/scipy,fernand/scipy,FRidh/scipy,Kamp9/scipy,newemailjdm/scipy,vigna/scipy,Dapid/scipy,newemailjdm/scipy,niknow/scipy,anntzer/scipy,lhilt/scipy,zxsted/scipy,fernand/scipy,jjhelmus/scipy,minhlongdo/scipy,haudren/scipy,petebachant/scipy,hainm/scipy,jseabold/scipy,lhilt/scipy,vberaudi/scipy,FRidh/scipy,ortylp/scipy,richardotis/scipy,surhudm/scipy,nvoron23/scipy,vberaudi/scipy,kleskjr/scipy,vberaudi/scipy,ortylp/scipy,larsmans/scipy,surhudm/scipy,rmcgibbo/scipy,fredrikw/scipy,mortada/scipy,gertingold/scipy,behzadnouri/scipy,rgommers/scipy,kalvdans/scipy,pbrod/scipy,anntzer/scipy,gertingold/scipy,Srisai85/scipy,niknow/scipy,Eric89GXL/scipy,pizzathief/scipy,gdooper/scipy,aeklant/scipy,kleskjr/scipy,jor-/scipy,anielsen001/scipy,juliantaylor/scipy,pnedunuri/scipy,felipebetancur/scipy,minhlongdo/scipy,Gillu13/scipy,anielsen001/scipy,chatcannon/scipy,scipy/scipy,efiring/scipy,tylerjereddy/scipy,woodscn/scipy,chatcannon/scipy,anielsen001/scipy,apbard/scipy,jseabold/scipy,surhudm/scipy,ortylp/scipy,aman-iitj/scipy,sauliusl/scipy,scipy/scipy,ChanderG/scipy,e-q/scipy,Shaswat27/scipy,matthew-brett/scipy,sonnyhu/scipy,aeklant/scipy,nmayorov/scipy,dominicelse/scipy,zxsted/scipy,mortada/scipy,perimosocordiae/scipy,jakevdp/scipy,jjhelmus/scipy,ilayn/scipy,lhilt/scipy,jamestwebber/scipy
|
---
+++
@@ -1,16 +1,29 @@
major = 0
minor = 3
-micro = 3
+micro = 2
#release_level = 'alpha'
release_level=''
-from __cvs_version__ import cvs_version
-cvs_minor = cvs_version[-3]
-cvs_serial = cvs_version[-1]
+try:
+ from __cvs_version__ import cvs_version
+ cvs_minor = cvs_version[-3]
+ cvs_serial = cvs_version[-1]
+except ImportError,msg:
+ print msg
+ cvs_minor = 0
+ cvs_serial = 0
-if release_level:
- scipy_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\
- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())
+if cvs_minor or cvs_serial:
+ if release_level:
+ scipy_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\
+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())
+ else:
+ scipy_version = '%(major)d.%(minor)d.%(micro)d'\
+ '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())
else:
- scipy_version = '%(major)d.%(minor)d.%(micro)d'\
- '_%(cvs_minor)d.%(cvs_serial)d' % (locals ())
+ if release_level:
+ scipy_version = '%(major)d.%(minor)d.%(micro)d_%(release_level)s'\
+ % (locals ())
+ else:
+ scipy_version = '%(major)d.%(minor)d.%(micro)d'\
+ % (locals ())
|
a895661f7ce1a814f308dbe8b5836a4cdb472c8c
|
cla_public/apps/base/filters.py
|
cla_public/apps/base/filters.py
|
import re
from cla_public.apps.base import base
@base.app_template_filter()
def matches(value, pattern):
return bool(re.search(pattern, value))
|
from cla_public.apps.base import base
@base.app_template_filter()
def test(value):
return value
|
Revert "BE: Update custom template filter"
|
Revert "BE: Update custom template filter"
This reverts commit ea0c0beb1d2aa0d5970b629ac06e6f9b9708bfdd.
|
Python
|
mit
|
ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public,ministryofjustice/cla_public
|
---
+++
@@ -1,6 +1,5 @@
-import re
from cla_public.apps.base import base
@base.app_template_filter()
-def matches(value, pattern):
- return bool(re.search(pattern, value))
+def test(value):
+ return value
|
8e4e12b3c9d64a8c6771b9deb7613c3653f47656
|
rpihelper/transmission/tasks.py
|
rpihelper/transmission/tasks.py
|
# -*- coding: utf-8 -*-
from rpihelper.celery import current_app, celery
from rpihelper.dropboxclient.logic import Client as DropBoxClient
from rpihelper.transmission.logic import (
transmissionrpc_client, transmissionrpc_add_torrent,
)
__all__ = (
'check_torrent_files',
)
@celery.task
def check_torrent_files():
tc = transmissionrpc_client()
if not tc:
current_app.logger.info('No connetion to remote transmission, stop task.')
return
dbc = DropBoxClient()
for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']):
file_url = dbc.file_url(f)
success = transmissionrpc_add_torrent(tc, file_url)
if success:
dbc.rm_file(f)
current_app.logger.info('Successfully added torrent "%s".' % file_url)
else:
current_app.logger.info('Torrent "%s" not added, skip it.' % file_url)
|
# -*- coding: utf-8 -*-
from tempfile import NamedTemporaryFile
from rpihelper.celery import current_app, celery
from rpihelper.dropboxclient.logic import Client as DropBoxClient
from rpihelper.transmission.logic import (
transmissionrpc_client, transmissionrpc_add_torrent,
)
__all__ = (
'check_torrent_files',
)
@celery.task
def check_torrent_files():
    """Add every torrent file from the configured Dropbox folder to transmission.

    Each file is downloaded into a temporary file and handed to transmission
    as a file:// URL.  Successfully added torrents are deleted from Dropbox;
    failures are logged and left for the next run.
    """
    tc = transmissionrpc_client()
    if not tc:
        current_app.logger.info('No connetion to remote transmission, stop task.')
        return
    dbc = DropBoxClient()
    for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']):
        with NamedTemporaryFile() as tf:
            tf.write(dbc.file(f))
            # Fix: flush so transmission sees the complete file when it opens
            # tf.name -- NamedTemporaryFile buffers writes, so without this
            # the daemon could read an empty/truncated torrent.
            tf.flush()
            success = transmissionrpc_add_torrent(tc, 'file://%s' % tf.name)
        if success:
            dbc.rm_file(f)
            current_app.logger.info('Successfully added torrent "%s".' % f)
        else:
            current_app.logger.info('Torrent "%s" not added, skip it.' % f)
|
Fix transmission task for torrent files
|
Fix transmission task for torrent files
|
Python
|
mit
|
Gr1N/rpihelper,Gr1N/rpihelper
|
---
+++
@@ -1,4 +1,6 @@
# -*- coding: utf-8 -*-
+
+from tempfile import NamedTemporaryFile
from rpihelper.celery import current_app, celery
from rpihelper.dropboxclient.logic import Client as DropBoxClient
@@ -20,10 +22,12 @@
dbc = DropBoxClient()
for f in dbc.folder(current_app.config['TRANSMISSION_DROPBOX_TORRENTS_FOLDER']):
- file_url = dbc.file_url(f)
- success = transmissionrpc_add_torrent(tc, file_url)
+ with NamedTemporaryFile() as tf:
+ tf.write(dbc.file(f))
+ success = transmissionrpc_add_torrent(tc, 'file://%s' % tf.name)
+
if success:
dbc.rm_file(f)
- current_app.logger.info('Successfully added torrent "%s".' % file_url)
+ current_app.logger.info('Successfully added torrent "%s".' % f)
else:
- current_app.logger.info('Torrent "%s" not added, skip it.' % file_url)
+ current_app.logger.info('Torrent "%s" not added, skip it.' % f)
|
f672d140987614c5e4e80114cf28f2f6350be233
|
pyBattleship.py
|
pyBattleship.py
|
import boards
def main():
    """Run one (stub) round of console Battleship.

    The loop currently exits after a single guess because enemyDead is
    forced True for testing.
    """
    playerDead = False
    enemyDead = False
    enemyBoard = boards.makeEnemyBoard()
    enemyLocations = boards.setupEnemyBoard()
    playerBoard = boards.makePlayerBoard()
    print("----BATTLESHIP----")
    boards.printBoards(enemyBoard, playerBoard)

    while not playerDead and not enemyDead:
        # NOTE(review): the guess is read but never evaluated yet.
        row = int(input("Guess row: "))
        col = int(input("Guess coloumn: "))
        #Make true for testing purposes
        enemyDead = True
    if enemyDead:
        print("You win!")
    else:
        print("You lose!")
main()
|
import boards
def main():
    """Run the console Battleship game loop.

    Reads 1-based row/column guesses, marks hits ("*") and misses ("X") on
    the enemy board, and ends when either side reaches MAX_HITS.
    """
    MAX_HITS = 17  # total ship cells per fleet
    enemyDead = False
    playerDead = False
    hitsOnEnemy = 0
    hitsOnPlayer = 0  # NOTE(review): never incremented -- enemy turn not implemented yet
    turn = 1
    enemyBoard = boards.makeEnemyBoard()
    enemyLocations = boards.setupEnemyBoard()
    playerBoard = boards.makePlayerBoard()
    print("----BATTLESHIP----")

    while not playerDead and not enemyDead:
        boards.printBoards(enemyBoard, playerBoard)
        print("Turn " + str(turn))
        row = int(input("Guess row: ")) - 1
        col = int(input("Guess coloumn: ")) - 1

        # Fix: validate the guess BEFORE indexing the boards.  The original
        # indexed enemyLocations[row][col] first, so an oversized guess
        # raised IndexError, a negative one silently wrapped to the other
        # side of the board, and re-guessing a hit cell counted extra hits.
        if (row < 0 or row > boards.BOARD_SIZE - 1) or (col < 0 or col > boards.BOARD_SIZE - 1):
            print("Oops, that's not even in the ocean.")
        elif enemyBoard[row][col] == "X" or enemyBoard[row][col] == "*":
            print("You guessed that one already.")
        elif enemyLocations[row][col] == "o":
            #It's a hit!
            enemyBoard[row][col] = "*"
            hitsOnEnemy += 1
        else:
            #It's a miss
            enemyBoard[row][col] = "X"

        #Check if either player is dead
        if hitsOnEnemy == MAX_HITS:
            enemyDead = True
        elif hitsOnPlayer == MAX_HITS:
            playerDead = True

        turn += 1
        #Make true for testing purposes
        enemyDead = True
    if enemyDead:
        print("YOU WIN!")
    else:
        print("YOU LOSE!")
main()
|
Add player input evaluation, determines hit/miss
|
Add player input evaluation, determines hit/miss
|
Python
|
apache-2.0
|
awhittle3/pyBattleship
|
---
+++
@@ -1,26 +1,54 @@
import boards
def main():
+ MAX_HITS = 17
+ enemyDead = False
playerDead = False
- enemyDead = False
+ hitsOnEnemy = 0
+ hitsOnPlayer = 0
+ turn = 1
enemyBoard = boards.makeEnemyBoard()
enemyLocations = boards.setupEnemyBoard()
playerBoard = boards.makePlayerBoard()
print("----BATTLESHIP----")
- boards.printBoards(enemyBoard, playerBoard)
-
+
while not playerDead and not enemyDead:
- row = int(input("Guess row: "))
- col = int(input("Guess coloumn: "))
+ boards.printBoards(enemyBoard, playerBoard)
+ print("Turn " + str(turn))
+ row = int(input("Guess row: ")) - 1
+ col = int(input("Guess coloumn: ")) - 1
+
+ #Player choice evaluated
+ if enemyLocations[row][col] == "o":
+ #It's a hit!
+ enemyBoard[row][col] = "*"
+ hitsOnEnemy += 1
+
+ else:
+ if(row < 0 or row > boards.BOARD_SIZE - 1) or (col < 0 or col > boards.BOARD_SIZE - 1):
+ print("Oops, that's not even in the ocean.")
+ elif(enemyBoard[row][col] == "X" or enemyBoard[row][col] == "*"):
+ print("You guessed that one already.")
+ else:
+ #It's a miss
+ enemyBoard[row][col] = "X"
+
+ #Check if either player is dead
+ if hitsOnEnemy == MAX_HITS:
+ enemyDead = True
+ elif hitsOnPlayer == MAX_HITS:
+ playerDead = True
+
+ turn += 1
#Make true for testing purposes
enemyDead = True
if enemyDead:
- print("You win!")
+ print("YOU WIN!")
else:
- print("You lose!")
+ print("YOU LOSE!")
main()
|
a365d9a9021b9500475d4f54b7c29cede0064017
|
pynmea2/types/proprietary/grm.py
|
pynmea2/types/proprietary/grm.py
|
# Garmin
from ... import nmea
class GRM(nmea.ProprietarySentence):
    """Base class for Garmin (PGRM*) proprietary NMEA sentences."""
    # Registry of sentence subclasses, keyed by "GRM" + sentence letter;
    # presumably populated by the nmea.ProprietarySentence machinery.
    sentence_types = {}
    def __new__(_cls, manufacturer, data):
        # Dispatch to the registered subclass (e.g. "GRM" + "E" -> GRME),
        # falling back to this generic base class.
        name = manufacturer + data[0]
        cls = _cls.sentence_types.get(name, _cls)
        return super(GRM, cls).__new__(cls)


class GRME(GRM):
    """ GARMIN Estimated position error
    """
    # NOTE(review): the fifth label repeats "Estimated Horiz. Position
    # Error" while its attribute is "osepe" -- looks swapped/copy-pasted
    # against the sixth entry; confirm against the PGRME field spec.
    fields = (
        ("Estimated Horiz. Position Error", "hpe"),
        ("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
        ("Estimated Vert. Position Error", "vpe"),
        ("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
        ("Estimated Horiz. Position Error", "osepe"),
        ("Overall Spherical Equiv. Position Error", "osepe_unit")
    )


class GRMM(GRM):
    """ GARMIN Map Datum
    """
    fields = (
        ('Currently Active Datum', 'datum'),
    )


class GRMZ(GRM):
    """ GARMIN Altitude Information
    """
    fields = (
        ("Altitude", "altitude"),
        ("Altitude Units (Feet)", "altitude_unit"),
        ("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
    )
|
# Garmin
from ... import nmea
class GRM(nmea.ProprietarySentence):
    """Base class for Garmin (PGRM*) proprietary NMEA sentences."""
    # Registry of sentence subclasses, keyed by "GRM" + sentence letter.
    sentence_types = {}
    def __new__(_cls, manufacturer, data):
        # Dispatch to the registered subclass (e.g. "GRM" + "E" -> GRME),
        # falling back to this generic base class.
        name = manufacturer + data[0]
        cls = _cls.sentence_types.get(name, _cls)
        return super(GRM, cls).__new__(cls)
    def __init__(self, manufacturer, data):
        # Record the full sentence type and drop the sub-type prefix from
        # the data before field parsing.
        # NOTE(review): __new__ keys on data[0] but __init__ reads data[1] --
        # presumably data[0] is empty for these sentences; confirm upstream.
        self.sentence_type = manufacturer + data[1]
        super(GRM, self).__init__(manufacturer, data[2:])


class GRME(GRM):
    """ GARMIN Estimated position error
    """
    fields = (
        ("Estimated Horiz. Position Error", "hpe"),
        ("Estimated Horiz. Position Error Unit (M)", "hpe_unit"),
        ("Estimated Vert. Position Error", "vpe"),
        ("Estimated Vert. Position Error Unit (M)", "vpe_unit"),
        ("Estimated Horiz. Position Error", "osepe"),
        ("Overall Spherical Equiv. Position Error", "osepe_unit")
    )


class GRMM(GRM):
    """ GARMIN Map Datum
    """
    fields = (
        ('Currently Active Datum', 'datum'),
    )


class GRMZ(GRM):
    """ GARMIN Altitude Information
    """
    fields = (
        ("Altitude", "altitude"),
        ("Altitude Units (Feet)", "altitude_unit"),
        ("Positional Fix Dimension (2=user, 3=GPS)", "pos_fix_dim")
    )
|
Add __init__ method to fix parsing
|
Add __init__ method to fix parsing
Ported this init method from ubx.py. This has the effect of removing the sentence sub-type from the data list.
Addresses issue #44
|
Python
|
mit
|
silentquasar/pynmea2,Knio/pynmea2
|
---
+++
@@ -9,6 +9,9 @@
cls = _cls.sentence_types.get(name, _cls)
return super(GRM, cls).__new__(cls)
+ def __init__(self, manufacturer, data):
+ self.sentence_type = manufacturer + data[1]
+ super(GRM, self).__init__(manufacturer, data[2:])
class GRME(GRM):
""" GARMIN Estimated position error
|
12d04493f1bb9b79f2f4adf639fdb07619539305
|
catwatch/blueprints/admin/models.py
|
catwatch/blueprints/admin/models.py
|
from sqlalchemy import func
from catwatch.blueprints.user.models import db, User
from catwatch.blueprints.issue.models import Issue
class Dashboard(object):
    """Read-only aggregate queries backing the admin dashboard."""
    @classmethod
    def group_and_count_users(cls):
        """
        Perform a group by/count on all user types.

        :return: List of results
        """
        count = func.count(User.role)
        return db.session.query(count, User.role).group_by(User.role).all()

    @classmethod
    def group_and_count_issues(cls):
        """
        Perform a group by/count on all issue types.

        :return: List of results
        """
        count = func.count(Issue.status)
        return db.session.query(count, Issue.status).group_by(
            Issue.status).all()
|
from sqlalchemy import func
from catwatch.blueprints.user.models import db, User
from catwatch.blueprints.issue.models import Issue
class Dashboard(object):
    """Read-only aggregate queries backing the admin dashboard."""

    @classmethod
    def group_and_count_users(cls):
        """
        Perform a group by/count on all user types.

        :return: List of results
        """
        role_count = func.count(User.role)
        query = db.session.query(role_count, User.role)
        return query.group_by(User.role).all()

    @classmethod
    def group_and_count_issues(cls):
        """
        Perform a group by/count on all issue types.

        :return: List of results
        """
        status_count = func.count(Issue.status)
        query = db.session.query(status_count, Issue.status)
        return query.group_by(Issue.status).all()
|
Fix inaccurate docstring for dashboard issue count
|
Fix inaccurate docstring for dashboard issue count
|
Python
|
mit
|
nickjj/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask,z123/build-a-saas-app-with-flask,z123/build-a-saas-app-with-flask,z123/build-a-saas-app-with-flask,nickjj/build-a-saas-app-with-flask
|
---
+++
@@ -18,7 +18,7 @@
@classmethod
def group_and_count_issues(cls):
"""
- Perform a group by/count on all user types.
+ Perform a group by/count on all issue types.
:return: List of results
"""
|
76ca06c26d74aaad1f0773321fdd382b12addcdc
|
src/django_easyfilters/utils.py
|
src/django_easyfilters/utils.py
|
try:
from django.db.models.constants import LOOKUP_SEP
except ImportError: # Django < 1.5 fallback
from django.db.models.sql.constants import LOOKUP_SEP
from django.db.models.related import RelatedObject
import six
def python_2_unicode_compatible(klass):  # Copied from Django 1.5
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if not six.PY3:
        # On Py2: the text-returning __str__ becomes __unicode__, and
        # __str__ is redefined to return UTF-8 encoded bytes.
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
def get_model_field(model, f):
    """Resolve a field lookup like ``author__name`` against *model*.

    Walks each relation segment in the lookup path, then returns
    ``(field, m2m)`` for the final segment, or ``None`` when an
    intermediate segment is not a field of the model it is looked up on.
    """
    # Fix: FieldDoesNotExist was referenced in the except clause but never
    # imported anywhere in this module, so the "missing field" path raised
    # NameError instead of returning None.  Imported locally to keep the
    # module header untouched (pre-1.8 Django location, matching the
    # RelatedObject import above).
    from django.db.models.fields import FieldDoesNotExist
    parts = f.split(LOOKUP_SEP)
    opts = model._meta
    for name in parts[:-1]:
        try:
            rel = opts.get_field_by_name(name)[0]
        except FieldDoesNotExist:
            return None
        if isinstance(rel, RelatedObject):
            # Reverse relation: follow it to the related model.
            model = rel.model
            opts = rel.opts
        else:
            # Forward FK/M2M: follow the target model.
            model = rel.rel.to
            opts = model._meta
    rel, model, direct, m2m = opts.get_field_by_name(parts[-1])
    return rel, m2m
|
try:
from django.db.models.constants import LOOKUP_SEP
except ImportError: # Django < 1.5 fallback
from django.db.models.sql.constants import LOOKUP_SEP
from django.db.models.related import RelatedObject
from six import PY3
def python_2_unicode_compatible(klass):  # Copied from Django 1.5
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if PY3:
        return klass
    # Py2: the text-returning __str__ becomes __unicode__, while __str__
    # is redefined to return UTF-8 encoded bytes.
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
def get_model_field(model, f):
    """Resolve a field lookup like ``author__name`` against *model*.

    Walks each relation segment in turn and returns ``(field, m2m)`` for
    the final segment.  A missing intermediate segment is not caught here:
    the FieldDoesNotExist from get_field_by_name propagates to the caller.
    """
    parts = f.split(LOOKUP_SEP)
    opts = model._meta
    for name in parts[:-1]:
        rel = opts.get_field_by_name(name)[0]
        if isinstance(rel, RelatedObject):
            # Reverse relation: follow it to the related model.
            model = rel.model
            opts = rel.opts
        else:
            # Forward FK/M2M: follow the target model.
            model = rel.rel.to
            opts = model._meta
    rel, model, direct, m2m = opts.get_field_by_name(parts[-1])
    return rel, m2m
|
Fix error handling in get_model_field (passthrough).
|
Fix error handling in get_model_field (passthrough).
|
Python
|
mit
|
ionelmc/django-easyfilters,ionelmc/django-easyfilters
|
---
+++
@@ -3,7 +3,8 @@
except ImportError: # Django < 1.5 fallback
from django.db.models.sql.constants import LOOKUP_SEP
from django.db.models.related import RelatedObject
-import six
+from six import PY3
+
def python_2_unicode_compatible(klass): # Copied from Django 1.5
"""
@@ -13,7 +14,7 @@
To support Python 2 and 3 with a single code base, define a __str__ method
returning text and apply this decorator to the class.
"""
- if not six.PY3:
+ if not PY3:
klass.__unicode__ = klass.__str__
klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
return klass
@@ -22,10 +23,7 @@
parts = f.split(LOOKUP_SEP)
opts = model._meta
for name in parts[:-1]:
- try:
- rel = opts.get_field_by_name(name)[0]
- except FieldDoesNotExist:
- return None
+ rel = opts.get_field_by_name(name)[0]
if isinstance(rel, RelatedObject):
model = rel.model
opts = rel.opts
|
f7c7cf55b6536fd5646272b700ce42cc02936fbb
|
st2common/st2common/transport/__init__.py
|
st2common/st2common/transport/__init__.py
|
from st2common.transport import actionexecution, publishers
__all__ = ['actionexecution', 'publishers']
|
from st2common.transport import actionexecution, publishers
# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
__all__ = ['actionexecution', 'publishers']
|
Add TODO to capture the fact that design discussion is pending.
|
Add TODO to capture the fact that design discussion is pending.
|
Python
|
apache-2.0
|
pixelrebel/st2,StackStorm/st2,pinterb/st2,jtopjian/st2,tonybaloney/st2,pinterb/st2,Itxaka/st2,emedvedev/st2,jtopjian/st2,lakshmi-kannan/st2,pixelrebel/st2,Plexxi/st2,alfasin/st2,nzlosh/st2,punalpatel/st2,StackStorm/st2,dennybaa/st2,Itxaka/st2,emedvedev/st2,dennybaa/st2,punalpatel/st2,armab/st2,StackStorm/st2,Plexxi/st2,Plexxi/st2,alfasin/st2,nzlosh/st2,Itxaka/st2,alfasin/st2,grengojbo/st2,peak6/st2,peak6/st2,armab/st2,nzlosh/st2,pixelrebel/st2,pinterb/st2,lakshmi-kannan/st2,nzlosh/st2,punalpatel/st2,lakshmi-kannan/st2,tonybaloney/st2,grengojbo/st2,jtopjian/st2,armab/st2,StackStorm/st2,grengojbo/st2,tonybaloney/st2,peak6/st2,Plexxi/st2,emedvedev/st2,dennybaa/st2
|
---
+++
@@ -1,3 +1,5 @@
from st2common.transport import actionexecution, publishers
+# TODO(manas) : Exchanges, Queues and RoutingKey design discussion pending.
+
__all__ = ['actionexecution', 'publishers']
|
b04fcc4a11eec2df0e9b2f8057aff2d073684122
|
config/experiment_config_lib.py
|
config/experiment_config_lib.py
|
import itertools
import string
class ControllerConfig(object):
    # Shared counter: controllers without an explicit port get sequential
    # ports starting at 8888.
    _port_gen = itertools.count(8888)

    def __init__(self, cmdline="", address="127.0.0.1", port=None, nom_port=None):
        '''
        Store metadata for the controller.
          - cmdline is an array of command line tokens.
            Note: if you need to pass in the address and port to controller's
            command line, use the aliases __address__ and __port__ to have the
            values interpolated automatically
          - address and port are the sockets switches will bind to
          - address and nom_port is the socket the simulator will use to grab the
            NOM from (None for no correspondence checking)
        '''
        self.address = address
        if cmdline == "":
            raise RuntimeError("Must specify boot parameters.")
        self.cmdline_string = cmdline
        if not port:
            port = self._port_gen.next()  # Python 2 generator protocol
        self.port = port
        self.nom_port = nom_port
        # Interpolate the __port__/__address__ placeholders into each token.
        # (Python 2 tuple-argument lambda syntax.)
        self.cmdline = map(lambda(x): string.replace(x, "__port__", str(port)),
                           map(lambda(x): string.replace(x, "__address__",
                               str(address)), cmdline.split()))

    @property
    def uuid(self):
        # (address, port) uniquely identifies this controller instance.
        return (self.address, self.port)

    def __repr__(self):
        return self.__class__.__name__ + "(cmdline=\"" + self.cmdline_string +\
            "\",address=\"" + self.address + "\",port=" + self.port.__repr__() +\
            ",nom_port=" + self.nom_port.__repr__() + ")"
|
import itertools
import string
class ControllerConfig(object):
    # Shared counter: controllers without an explicit port get sequential
    # ports starting at 8888.
    _port_gen = itertools.count(8888)

    def __init__(self, cmdline="", address="127.0.0.1", port=None):
        '''
        Store metadata for the controller.
          - cmdline is an array of command line tokens.
            Note: if you need to pass in the address and port to controller's
            command line, use the aliases __address__ and __port__ to have the
            values interpolated automatically
          - address and port are the sockets switches will bind to
        '''
        if cmdline == "":
            raise RuntimeError("Must specify boot parameters.")
        self.cmdline_string = cmdline
        self.address = address
        if not port:
            port = self._port_gen.next()  # Python 2 generator protocol
        self.port = port
        # Infer the controller name from the command line string.
        # NOTE(review): self.name is only assigned when "pox" appears in the
        # command line -- any other controller leaves the attribute missing
        # (AttributeError on access); confirm this is intended.
        if "pox" in self.cmdline_string:
            self.name = "pox"
        # Interpolate the __port__/__address__ placeholders into each token.
        # (Python 2 tuple-argument lambda syntax.)
        self.cmdline = map(lambda(x): string.replace(x, "__port__", str(port)),
                           map(lambda(x): string.replace(x, "__address__",
                               str(address)), cmdline.split()))

    @property
    def uuid(self):
        # (address, port) uniquely identifies this controller instance.
        return (self.address, self.port)

    def __repr__(self):
        return self.__class__.__name__ + "(cmdline=\"" + self.cmdline_string +\
            "\",address=\"" + self.address + "\",port=" + self.port.__repr__() + ")"
|
Remove nom_port from options. This should be inferred from the controller name. Add a name member for that. For now, infer the controller from the command-line string.
|
Remove nom_port from options. This should be inferred from the controller name. Add a name member for that. For now, infer the controller from the command-line string.
|
Python
|
apache-2.0
|
ucb-sts/sts,jmiserez/sts,ucb-sts/sts,jmiserez/sts
|
---
+++
@@ -4,7 +4,7 @@
class ControllerConfig(object):
_port_gen = itertools.count(8888)
- def __init__(self, cmdline="", address="127.0.0.1", port=None, nom_port=None):
+ def __init__(self, cmdline="", address="127.0.0.1", port=None):
'''
Store metadata for the controller.
- cmdline is an array of command line tokens.
@@ -13,17 +13,16 @@
command line, use the aliases __address__ and __port__ to have the
values interpolated automatically
- address and port are the sockets switches will bind to
- - address and nom_port is the socket the simulator will use to grab the
- NOM from (None for no correspondence checking)
'''
- self.address = address
if cmdline == "":
raise RuntimeError("Must specify boot parameters.")
self.cmdline_string = cmdline
+ self.address = address
if not port:
port = self._port_gen.next()
self.port = port
- self.nom_port = nom_port
+ if "pox" in self.cmdline_string:
+ self.name = "pox"
self.cmdline = map(lambda(x): string.replace(x, "__port__", str(port)),
map(lambda(x): string.replace(x, "__address__",
str(address)), cmdline.split()))
@@ -34,5 +33,4 @@
def __repr__(self):
return self.__class__.__name__ + "(cmdline=\"" + self.cmdline_string +\
- "\",address=\"" + self.address + "\",port=" + self.port.__repr__() +\
- ",nom_port=" + self.nom_port.__repr__() + ")"
+ "\",address=\"" + self.address + "\",port=" + self.port.__repr__() + ")"
|
6c86f7ef2ba5acdb387d154a618dd0f6bb65af6c
|
stacker/hooks/route53.py
|
stacker/hooks/route53.py
|
import logging
logger = logging.getLogger(__name__)
from aws_helper.connection import ConnectionManager
from stacker.util import create_route53_zone
def create_domain(region, namespace, mappings, parameters, **kwargs):
    """Stacker hook: ensure the Route53 zone named by the BaseDomain
    parameter exists.  Returns False (after logging) when the parameter
    is missing, True otherwise."""
    conn = ConnectionManager(region)
    if 'BaseDomain' not in parameters:
        logger.error("BaseDomain parameter not provided.")
        return False
    create_route53_zone(conn.route53, parameters['BaseDomain'])
    return True
|
import logging
logger = logging.getLogger(__name__)
from aws_helper.connection import ConnectionManager
from stacker.util import create_route53_zone
def create_domain(region, namespace, mappings, parameters, **kwargs):
    """Stacker hook: ensure a Route53 zone exists for the given domain.

    The domain comes from the ``domain`` hook argument, falling back to the
    ``BaseDomain`` stack parameter.  Returns False (after logging) when
    neither is provided, True otherwise.
    """
    conn = ConnectionManager(region)
    domain = kwargs.get('domain', parameters.get('BaseDomain'))
    if not domain:
        logger.error("domain argument or BaseDomain parameter not provided.")
        return False
    create_route53_zone(conn.route53, domain)
    return True
|
Allow creation of domains without BaseDomain
|
Allow creation of domains without BaseDomain
Gives the hook the ability to parse args.
|
Python
|
bsd-2-clause
|
mhahn/stacker,mhahn/stacker,federicobaldo/stacker,EnTeQuAk/stacker,remind101/stacker,remind101/stacker
|
---
+++
@@ -10,10 +10,9 @@
def create_domain(region, namespace, mappings, parameters, **kwargs):
conn = ConnectionManager(region)
- try:
- domain = parameters['BaseDomain']
- except KeyError:
- logger.error("BaseDomain parameter not provided.")
+ domain = kwargs.get('domain', parameters.get('BaseDomain'))
+ if not domain:
+ logger.error("domain argument or BaseDomain parameter not provided.")
return False
create_route53_zone(conn.route53, domain)
return True
|
a01be482c81641624d77f0bfe32ad6f884996aed
|
pydaqmx_helper/getDeviceName.py
|
pydaqmx_helper/getDeviceName.py
|
from PyDAQmx.DAQmxFunctions import *
def getDeviceName():
    """Return the last DAQmx device name registered on the system.

    DAQmxGetSysDevNames fills the buffer with a comma-separated list of
    device names; the final entry is returned, stripped of whitespace.
    """
    buffer = create_string_buffer(1024)
    # Buffer will break if too many devices, like 10 or 15, make number 100 bigger if you have to
    b = c_ulong(1024)
    DAQmxGetSysDevNames(buffer, b)
    # Fix: the result was read from an undefined name ``a`` (NameError);
    # read from the buffer that was actually filled.
    return buffer.value.decode('utf-8').split(',')[-1].strip()
|
from PyDAQmx.DAQmxFunctions import *
def getDeviceName():
    """Return the last DAQmx device name registered on the system.

    DAQmxGetSysDevNames fills the buffer with a comma-separated list of
    device names; the final entry is returned, stripped of whitespace.
    """
    buffer = create_string_buffer(1024)
    # Buffer will break if too many devices, like 10 or 15, make number 100 bigger if you have to
    b = c_ulong(1024)
    # Fix: both the call and the return used an undefined name ``a``
    # (NameError); use the allocated buffer consistently.
    DAQmxGetSysDevNames(buffer, b)
    return buffer.value.decode('utf-8').split(',')[-1].strip()
|
Revert "Reassign a variable following previous commit"
|
Revert "Reassign a variable following previous commit"
This reverts commit 5c963691d3af4cddb0e3090a709110573ac97f6a.
|
Python
|
mit
|
MarcoForte/PyDAQmx_Helper
|
---
+++
@@ -4,5 +4,5 @@
buffer = create_string_buffer(1024)
# Buffer will break if too many devices, like 10 or 15, make number 100 bigger if you have to
b = c_ulong(1024)
- DAQmxGetSysDevNames(buffer, b)
+ DAQmxGetSysDevNames(a, b)
return a.value.decode('utf-8').split(',')[-1].strip()
|
415d0bc36e509303242d32c5fc2f994346cbdb3c
|
pastas/version.py
|
pastas/version.py
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.15.0'
|
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
__version__ = '0.16.0b'
|
Update dev branch to 0.16.0b
|
Update dev branch to 0.16.0b
|
Python
|
mit
|
pastas/pasta,pastas/pastas
|
---
+++
@@ -1,3 +1,3 @@
# This is the only location where the version will be written and changed.
# Based on https://packaging.python.org/single_source_version/
-__version__ = '0.15.0'
+__version__ = '0.16.0b'
|
1f1699bb7b8cf537b8d2e206ac0ff03b74d5b3a7
|
container_files/init/mldb_finish.py
|
container_files/init/mldb_finish.py
|
#!/usr/bin/env python
# Copyright Datacratic 2016
# Author: Jean Raby <jean@datacratic.com>
# This script is called by runsv when the mldb service exits.
# See http://smarden.org/runit/runsv.8.html for more details.
# Two arguments are given to ./finish:
# The first one is ./run's exit code, or -1 if ./run didn't exit normally.
# The second one is the least significant byte of the exit status as
# determined by waitpid(2); for instance it is 0 if ./run exited normally,
# and the signal number if ./run was terminated by a signal.
# If runsv cannot start ./run for some reason, the exit code is 111 and the status is 0.
import os
import sys
# Decode runsv's ./finish arguments: argv[1] is ./run's exit code, argv[2]
# the least significant byte of the waitpid(2) status (signal number when
# the process was killed by a signal).
msg = ""
if len(sys.argv) == 3:
    exit_code = sys.argv[1]
    status_code = sys.argv[2]
    if os.WIFSIGNALED(int(status_code)):
        sig = os.WTERMSIG(int(status_code))
        msg = " Killed by signal %d." % sig
# Python 2 print statement; appends the killing signal, if any.
print "MLDB exited.%s" % (msg)
|
#!/usr/bin/env python
# Copyright Datacratic 2016
# Author: Jean Raby <jean@datacratic.com>
# This script is called by runsv when the mldb service exits.
# See http://smarden.org/runit/runsv.8.html for more details.
# Two arguments are given to ./finish:
# The first one is ./run's exit code, or -1 if ./run didn't exit normally.
# The second one is the least significant byte of the exit status as
# determined by waitpid(2); for instance it is 0 if ./run exited normally,
# and the signal number if ./run was terminated by a signal.
# If runsv cannot start ./run for some reason, the exit code is 111 and the status is 0.
import os
import sys
# Human-readable descriptions for the signals MLDB is commonly killed by.
sigmap = { 4: "SIGILL: illegal instruction - internal error",
           6: "SIGABRT: abort(3) called - internal error",
           9: "SIGKILL: killed from outside, did we run out of memory (OOM killed?)",
           11: "SIGSEGV: segfault - internal error",
           15: "SIGTERM: regular shutdown",
         }
msg = ""
sig = None
if len(sys.argv) == 3:
    # runsv passes ./run's exit code and the waitpid(2) status byte.
    exit_code = sys.argv[1]
    status_code = sys.argv[2]
    if os.WIFSIGNALED(int(status_code)):
        sig = os.WTERMSIG(int(status_code))
print # we like space
print
if sig == None:
    print "MLDB exited."
else:
    # Killed by a signal: report it, with a friendly explanation if known.
    msg = "MLDB exited due to signal %d." % (sig)
    if sig in sigmap:
        msg += " " + sigmap[sig]
    print msg
print
print
|
Add signal map and human readable description
|
Add signal map and human readable description
|
Python
|
apache-2.0
|
mldbai/mldb,mldbai/mldb,mldbai/mldb,mldbai/mldb,mldbai/mldb,mldbai/mldb,mldbai/mldb
|
---
+++
@@ -14,13 +14,30 @@
import os
import sys
+sigmap = { 4: "SIGILL: illegal instruction - internal error",
+ 6: "SIGABRT: abort(3) called - internal error",
+ 9: "SIGKILL: killed from outside, did we run out of memory (OOM killed?)",
+ 11: "SIGSEGV: segfault - internal error",
+ 15: "SIGTERM: regular shutdown",
+ }
+
msg = ""
+sig = None
if len(sys.argv) == 3:
exit_code = sys.argv[1]
status_code = sys.argv[2]
if os.WIFSIGNALED(int(status_code)):
sig = os.WTERMSIG(int(status_code))
- msg = " Killed by signal %d." % sig
-print "MLDB exited.%s" % (msg)
+print # we like space
+print
+if sig == None:
+ print "MLDB exited."
+else:
+ msg = "MLDB exited due to signal %d." % (sig)
+ if sig in sigmap:
+ msg += " " + sigmap[sig]
+ print msg
+print
+print
|
cec8a1d5afa936ce7df5bae8b7cead9564ac7b97
|
youmap/views.py
|
youmap/views.py
|
from django.views.generic import TemplateView
from chickpea.models import Map
class Home(TemplateView):
    """Home page: the 100 most recently modified maps."""
    # Full-page template vs. the list fragment served to ajax requests.
    template_name = "youmap/home.html"
    list_template_name = "chickpea/map_list.html"

    def get_context_data(self, **kwargs):
        # Most recently modified first, capped at 100.
        maps = Map.objects.order_by('-modified_at')[:100]
        return {
            "maps": maps
        }

    def get_template_names(self):
        """
        Dispatch template according to the kind of request: ajax or normal.
        """
        if self.request.is_ajax():
            return [self.list_template_name]
        else:
            return [self.template_name]
home = Home.as_view()
|
from django.views.generic import TemplateView
from chickpea.models import Map
class Home(TemplateView):
    """Home page: the 100 most recently created maps."""
    # Full-page template vs. the list fragment served to ajax requests.
    template_name = "youmap/home.html"
    list_template_name = "chickpea/map_list.html"

    def get_context_data(self, **kwargs):
        """Expose the latest maps (newest pk first, capped at 100)."""
        latest_maps = Map.objects.order_by('-pk')[:100]
        return {"maps": latest_maps}

    def get_template_names(self):
        """
        Dispatch template according to the kind of request: ajax or normal.
        """
        if not self.request.is_ajax():
            return [self.template_name]
        return [self.list_template_name]
home = Home.as_view()
|
Ordering by modified_at creates problems with endless_navigation and the browser cache
|
Ordering by modified_at creates problems with endless_navigation and the browser cache
Need to handle cache properly before
|
Python
|
agpl-3.0
|
diraol/umap
|
---
+++
@@ -8,7 +8,7 @@
list_template_name = "chickpea/map_list.html"
def get_context_data(self, **kwargs):
- maps = Map.objects.order_by('-modified_at')[:100]
+ maps = Map.objects.order_by('-pk')[:100]
return {
"maps": maps
}
|
b6052b35235a09f508aa75012badc830df8bcfa0
|
sethji/views/sync.py
|
sethji/views/sync.py
|
# -*- coding: utf-8 -*-
#
from sethji.model.sync import SyncAws
from sethji.views.account import requires_login
from flask import Blueprint, redirect, url_for, flash, request
mod = Blueprint('sync', __name__, url_prefix='/sync')
@mod.route("/", methods=["POST"])
@requires_login
def sync():
    """Kick off a background AWS sync and redirect back to the posting page."""
    sync_aws = SyncAws()
    sync_aws.background_sync()
    flash(u'AWS Sync Initiated')
    return redirect(request.path)
def is_sync_in_progress():
    """Report whether a background AWS sync is currently running."""
    return SyncAws().is_sync_in_progress()
|
# -*- coding: utf-8 -*-
#
from sethji.model.sync import SyncAws
from sethji.views.account import requires_login
from flask import Blueprint, redirect, url_for, flash
mod = Blueprint('sync', __name__, url_prefix='/sync')
@mod.route("/", methods=["POST"])
@requires_login
def sync():
    """Kick off a background AWS sync and send the user back to the home page."""
    sync_aws = SyncAws()
    sync_aws.background_sync()
    flash(u'AWS Sync Initiated')
    return redirect(url_for('home'))
def is_sync_in_progress():
    """Report whether a background AWS sync is currently running."""
    sync_aws = SyncAws()
    return sync_aws.is_sync_in_progress()
|
Revert "Redirect to same page"
|
Revert "Redirect to same page"
This reverts commit 1f49ee31c21e1bebefb3be9ecc9e4df7c115a5ef.
|
Python
|
mit
|
rohit01/sethji,rohit01/sethji,rohit01/sethji
|
---
+++
@@ -3,7 +3,7 @@
from sethji.model.sync import SyncAws
from sethji.views.account import requires_login
-from flask import Blueprint, redirect, url_for, flash, request
+from flask import Blueprint, redirect, url_for, flash
mod = Blueprint('sync', __name__, url_prefix='/sync')
@@ -15,7 +15,7 @@
sync_aws = SyncAws()
sync_aws.background_sync()
flash(u'AWS Sync Initiated')
- return redirect(request.path)
+ return redirect(url_for('home'))
def is_sync_in_progress():
|
fa8e30c2b41e8078ca87a73cc90c2652c46b1ee0
|
corvus/console.py
|
corvus/console.py
|
import sys
from corvus.client import Corvus
def main():
    """Image drive 1 to image.bin while verifying every sector.

    NOTE(review): each sector is read, WRITTEN BACK, and re-read for
    comparison -- this exercises the write path and can destroy data if a
    write fails partway; confirm the destructive verify is intended.
    """
    corvus = Corvus()
    corvus.init_drive()
    total_sectors = corvus.get_drive_capacity(1)
    with open("image.bin", "wb") as f:
        for i in range(total_sectors):
            orig_data = corvus.read_sector_512(1, i)
            corvus.write_sector_512(1, i, orig_data)
            data = corvus.read_sector_512(1, i)
            # Abort with the failing sector number on a verify mismatch.
            if data != orig_data: raise ValueError(i)
            f.write(''.join([chr(d) for d in data]))
            f.flush()
            sys.stdout.write("\r%d bytes" % (i * 512))
            sys.stdout.flush()

if __name__ == "__main__":
    main()
|
import sys
from corvus.client import Corvus
def backup(corvus, filename):
    """Dump every sector of drive 1 into *filename*, printing progress."""
    total_sectors = corvus.get_drive_capacity(1)
    with open(filename, "wb") as f:
        for sector in range(total_sectors):
            raw = corvus.read_sector_512(1, sector)
            f.write(''.join(chr(b) for b in raw))
            sys.stdout.write("\r%d bytes" % (sector * 512))
            sys.stdout.flush()
    sys.stdout.write("\n")
def restore(corvus, filename):
    """Write the image in *filename* back to drive 1, sector by sector.

    Stops early when the file runs out (a short final read ends the
    restore), printing progress as it goes.
    """
    total_sectors = corvus.get_drive_capacity(1)
    with open(filename, "rb") as f:
        for i in range(total_sectors):
            data = [ ord(d) for d in f.read(512) ]
            if len(data) < 512:
                break
            corvus.write_sector_512(1, i, data)
            sys.stdout.write("\r%d bytes" % (i * 512))
            sys.stdout.flush()
    sys.stdout.write("\n")
def main():
    # Entry point: connect to the drive and back it up to image.bin.
    # (Call restore(corvus, "image.bin") instead to write an image back.)
    corvus = Corvus()
    corvus.init_drive()
    backup(corvus, "image.bin")

if __name__ == "__main__":
    main()
|
Add backup and restore functions
|
Add backup and restore functions
|
Python
|
bsd-3-clause
|
mnaberez/corvus
|
---
+++
@@ -1,20 +1,32 @@
import sys
from corvus.client import Corvus
+
+def backup(corvus, filename):
+ total_sectors = corvus.get_drive_capacity(1)
+ with open(filename, "wb") as f:
+ for i in range(total_sectors):
+ data = corvus.read_sector_512(1, i)
+ f.write(''.join([ chr(d) for d in data ]))
+ sys.stdout.write("\r%d bytes" % (i * 512))
+ sys.stdout.flush()
+ sys.stdout.write("\n")
+
+def restore(corvus, filename):
+ total_sectors = corvus.get_drive_capacity(1)
+ with open(filename, "rb") as f:
+ for i in range(total_sectors):
+ data = [ ord(d) for d in f.read(512) ]
+ if len(data) < 512:
+ break
+ corvus.write_sector_512(1, i, data)
+ sys.stdout.write("\r%d bytes" % (i * 512))
+ sys.stdout.flush()
+ sys.stdout.write("\n")
def main():
corvus = Corvus()
corvus.init_drive()
- total_sectors = corvus.get_drive_capacity(1)
- with open("image.bin", "wb") as f:
- for i in range(total_sectors):
- orig_data = corvus.read_sector_512(1, i)
- corvus.write_sector_512(1, i, orig_data)
- data = corvus.read_sector_512(1, i)
- if data != orig_data: raise ValueError(i)
- f.write(''.join([chr(d) for d in data]))
- f.flush()
- sys.stdout.write("\r%d bytes" % (i * 512))
- sys.stdout.flush()
+ backup(corvus, "image.bin")
if __name__ == "__main__":
main()
|
adb4e5bcf08dc8703cbd5dbd651e1fef3afbbbad
|
simphony/__init__.py
|
simphony/__init__.py
|
# -*- coding: utf-8 -*-
"""SimPhoNy: Simulation framework for multi-scale phenomena in micro-
and nanosystems.
SimPhoNy is an EU-project funded by the 7th Framework Programme (Project
number 604005) under the call NMP.2013.1.4-1: "Development of an integrated
multi-scale modelling environment for nanomaterials and systems by design".
SimPhoNy is a collaborative research project running from January 1st 2014
until December 31st 2016.
For more information see: http://www.simphony-project.eu/.
:copyright: (c) 2014, 2015 SimPhoNy Consortium
:license: BSD, see LICENSE for more details.
"""
|
# -*- coding: utf-8 -*-
"""SimPhoNy: Simulation framework for multi-scale phenomena in micro-
and nanosystems.
SimPhoNy is an EU-project funded by the 7th Framework Programme (Project
number 604005) under the call NMP.2013.1.4-1: "Development of an integrated
multi-scale modelling environment for nanomaterials and systems by design".
SimPhoNy is a collaborative research project running from January 1st 2014
until December 31st 2016.
For more information see: http://www.simphony-project.eu/.
:copyright: (c) 2014, 2015, 2016 SimPhoNy Consortium
:license: BSD, see LICENSE for more details.
"""
|
Add 2016 to copyright notice
|
Add 2016 to copyright notice
|
Python
|
bsd-2-clause
|
simphony/simphony-common
|
---
+++
@@ -10,6 +10,6 @@
For more information see: http://www.simphony-project.eu/.
-:copyright: (c) 2014, 2015 SimPhoNy Consortium
+:copyright: (c) 2014, 2015, 2016 SimPhoNy Consortium
:license: BSD, see LICENSE for more details.
"""
|
cdcecaad78b8acf1c067c5eb631e6c3ae7167c2c
|
elasticsearch_dsl/__init__.py
|
elasticsearch_dsl/__init__.py
|
from .query import Q
from .filter import F
from .aggs import A
from .function import SF
from .search import Search
from .field import *
from .document import DocType
from .mapping import Mapping
from .index import Index
from .analysis import analyzer, token_filter, char_filter, tokenizer
VERSION = (0, 0, 4, 'dev')
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))
|
from .query import Q
from .filter import F
from .aggs import A
from .function import SF
from .search import Search
from .field import *
from .document import DocType, MetaField
from .mapping import Mapping
from .index import Index
from .analysis import analyzer, token_filter, char_filter, tokenizer
VERSION = (0, 0, 4, 'dev')
__version__ = VERSION
__versionstr__ = '.'.join(map(str, VERSION))
|
Include MetaField in things importable from root
|
Include MetaField in things importable from root
|
Python
|
apache-2.0
|
sangheestyle/elasticsearch-dsl-py,3lnc/elasticsearch-dsl-py,elastic/elasticsearch-dsl-py,harshmaur/elasticsearch-dsl-py,solarissmoke/elasticsearch-dsl-py,Quiri/elasticsearch-dsl-py,harshit298/elasticsearch-dsl-py,f-santos/elasticsearch-dsl-py,reflection/elasticsearch-dsl-py,REVLWorld/elasticsearch-dsl-py,ziky90/elasticsearch-dsl-py,avishai-ish-shalom/elasticsearch-dsl-py,gkirkpatrick/elasticsearch-dsl-py,hampsterx/elasticsearch-dsl-py
|
---
+++
@@ -4,7 +4,7 @@
from .function import SF
from .search import Search
from .field import *
-from .document import DocType
+from .document import DocType, MetaField
from .mapping import Mapping
from .index import Index
from .analysis import analyzer, token_filter, char_filter, tokenizer
|
eac383015161f661de33a94dae958a21761071dc
|
zeus/run.py
|
zeus/run.py
|
#!/usr/bin/env python
"""
run.py makes it easy to start Flask's built-in development webserver.
To start a development server, ensure all the necessary packages are
installed: (I suggest setting up a Virtualenv for this)
$ pip install -r requirements.txt
< ... >
$ python run.py
* Running on http://127.0.0.1:5000/
* Restarting with reloader
Author: Zach Anders
Date: 01/26/2015
"""
from app import app
import config
from rest_api.controller import rest_blueprint
app.register_blueprint(rest_blueprint)
if __name__ == "__main__":
app.run()
|
#!/usr/bin/env python
"""
run.py makes it easy to start Flask's built-in development webserver.
To start a development server, ensure all the necessary packages are
installed: (I suggest setting up a Virtualenv for this)
$ pip install -r requirements.txt
< ... >
$ python run.py
* Running on http://127.0.0.1:5000/
* Restarting with reloader
Author: Zach Anders
Date: 01/26/2015
"""
from app import app
import config
from rest_api.controller import rest_blueprint
app.register_blueprint(rest_blueprint, url_prefix="/api")
if __name__ == "__main__":
app.run()
|
Move all api routes into /api/
|
Move all api routes into /api/
|
Python
|
bsd-2-clause
|
nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,ZachAnders/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs,nbroeking/OPLabs,jrahm/OPLabs,jrahm/OPLabs,ZachAnders/OPLabs
|
---
+++
@@ -21,7 +21,7 @@
import config
from rest_api.controller import rest_blueprint
-app.register_blueprint(rest_blueprint)
+app.register_blueprint(rest_blueprint, url_prefix="/api")
if __name__ == "__main__":
app.run()
|
b0231d8d8832559a2e5382c3e0ad47e6d6232761
|
src/oscar/apps/offer/signals.py
|
src/oscar/apps/offer/signals.py
|
from django.db.models.signals import post_delete
from django.dispatch import receiver
from oscar.core.loading import get_model
ConditionalOffer = get_model('offer', 'ConditionalOffer')
Condition = get_model('offer', 'Condition')
Benefit = get_model('offer', 'Benefit')
@receiver(post_delete, sender=ConditionalOffer)
def delete_related_useless_conditions_and_benefits(instance, **kwargs):
offer = instance # the object is no longer in the database
condition_id = offer.condition_id
condition = Condition.objects.get(id=condition_id)
condition_is_unique = condition.offers.count() == 0
condition_is_not_custom = condition.proxy_class == ''
if condition_is_not_custom and condition_is_unique:
condition.delete()
benefit_id = offer.benefit_id
benefit = Benefit.objects.get(id=benefit_id)
benefit_is_unique = benefit.offers.count() == 0
benefit_is_not_custom = benefit.proxy_class == ''
if benefit_is_not_custom and benefit_is_unique:
benefit.delete()
|
from django.db.models.signals import post_delete
from django.dispatch import receiver
from oscar.core.loading import get_model
ConditionalOffer = get_model('offer', 'ConditionalOffer')
Condition = get_model('offer', 'Condition')
Benefit = get_model('offer', 'Benefit')
@receiver(post_delete, sender=ConditionalOffer)
def delete_unused_related_conditions_and_benefits(instance, **kwargs):
offer = instance # the object is no longer in the database
condition_id = offer.condition_id
condition = Condition.objects.get(id=condition_id)
condition_is_unique = condition.offers.count() == 0
condition_is_not_custom = condition.proxy_class == ''
if condition_is_not_custom and condition_is_unique:
condition.delete()
benefit_id = offer.benefit_id
benefit = Benefit.objects.get(id=benefit_id)
benefit_is_unique = benefit.offers.count() == 0
benefit_is_not_custom = benefit.proxy_class == ''
if benefit_is_not_custom and benefit_is_unique:
benefit.delete()
|
Rename offer signal to remove unused conditions/benefits
|
Rename offer signal to remove unused conditions/benefits
|
Python
|
bsd-3-clause
|
sasha0/django-oscar,sasha0/django-oscar,sasha0/django-oscar,solarissmoke/django-oscar,solarissmoke/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,solarissmoke/django-oscar,sasha0/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,django-oscar/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,django-oscar/django-oscar,sonofatailor/django-oscar
|
---
+++
@@ -9,7 +9,7 @@
@receiver(post_delete, sender=ConditionalOffer)
-def delete_related_useless_conditions_and_benefits(instance, **kwargs):
+def delete_unused_related_conditions_and_benefits(instance, **kwargs):
offer = instance # the object is no longer in the database
condition_id = offer.condition_id
|
dda35a476f81be0deb67c1a45320d17dc927788b
|
smile_base/models/ir_config_parameter.py
|
smile_base/models/ir_config_parameter.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import api, models, tools
class IrConfigParameter(models.Model):
_inherit = 'ir.config_parameter'
@api.model
def get_param(self, key, default=False):
if key == 'server.environment':
return tools.config.get('server.environment') or default
if key == 'max_upload_size':
return tools.config.get('max_upload_size') or default
return super(IrConfigParameter, self).get_param(key, default)
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 Smile (<http://www.smile.fr>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import api, models, tools
class IrConfigParameter(models.Model):
_inherit = 'ir.config_parameter'
@api.model
def get_param(self, key, default=False):
if key in tools.config.options:
return tools.config.get(key)
return super(IrConfigParameter, self).get_param(key, default)
|
Allow to override any config parameter via config file
|
[IMP] Allow to override any config parameter via config file
|
Python
|
agpl-3.0
|
chadyred/odoo_addons,chadyred/odoo_addons,ovnicraft/odoo_addons,bmya/odoo_addons,chadyred/odoo_addons,odoocn/odoo_addons,tiexinliu/odoo_addons,bmya/odoo_addons,odoocn/odoo_addons,ovnicraft/odoo_addons,tiexinliu/odoo_addons,ovnicraft/odoo_addons,odoocn/odoo_addons,tiexinliu/odoo_addons,bmya/odoo_addons
|
---
+++
@@ -27,8 +27,6 @@
@api.model
def get_param(self, key, default=False):
- if key == 'server.environment':
- return tools.config.get('server.environment') or default
- if key == 'max_upload_size':
- return tools.config.get('max_upload_size') or default
+ if key in tools.config.options:
+ return tools.config.get(key)
return super(IrConfigParameter, self).get_param(key, default)
|
a12ea60be17dabf837984e03162ebff152f0fd21
|
examples/project_one/input.py
|
examples/project_one/input.py
|
def read_file(filename):
# Open the file
file_obj = open(filename)
# Iterate over lines in the file
for line in file_obj:
# Split line by spaces (creates a list)
# Alternatives: split(',')
numbers = line.split()
if len(numbers) != 2:
# Convert strings to numbers
numbers2 = []
for number in numbers:
# Convert number to float
number = float(number)
# Append to temperary list
numbers2.append(number)
# Replace numbers by numbers2
numbers = numbers2
else:
# We're processing a header
print 'Skipping header line'
return contents
# Just for debugging purposes
read_file('data.txt')
|
def read_file(filename):
# Open the file
file_obj = open(filename)
# Iterate over lines in the file
for line in file_obj:
# Split line by spaces (creates a list)
# Alternatives: split(',')
numbers = line.split()
if len(numbers) != 2:
# Convert strings to numbers
# map() calls the first argument on every item in the second
# argument and returns a list of results.
numbers = map(float, numbers)
else:
# We're processing a header
print 'Skipping header line'
return contents
# Just for debugging purposes
read_file('data.txt')
|
Use map for smarter iteration.
|
Use map for smarter iteration.
|
Python
|
mit
|
dokterbob/slf-programming-workshops,dokterbob/slf-programming-workshops,dokterbob/slf-programming-workshops,dokterbob/slf-programming-workshops
|
---
+++
@@ -12,17 +12,9 @@
if len(numbers) != 2:
# Convert strings to numbers
- numbers2 = []
- for number in numbers:
- # Convert number to float
- number = float(number)
-
- # Append to temperary list
- numbers2.append(number)
-
- # Replace numbers by numbers2
- numbers = numbers2
-
+ # map() calls the first argument on every item in the second
+ # argument and returns a list of results.
+ numbers = map(float, numbers)
else:
# We're processing a header
print 'Skipping header line'
|
48da784fbf74d47f22f2527695b2d9de1528a484
|
flow_workflow/commands/submit_workflow.py
|
flow_workflow/commands/submit_workflow.py
|
from flow_workflow.commands.launch_base import LaunchWorkflowCommandBase
from flow.configuration.inject.broker import BrokerConfiguration
from flow.configuration.inject.redis_conf import RedisConfiguration
from flow.configuration.inject.service_locator import ServiceLocatorConfiguration
from twisted.internet import defer
class SubmitWorkflowCommand(LaunchWorkflowCommandBase):
injector_modules = [
BrokerConfiguration,
RedisConfiguration,
ServiceLocatorConfiguration,
]
local_workflow = True
def setup_services(self, net):
pass
def wait_for_results(self, net, block):
if block:
return self.setup_completion_handler(net)
else:
return defer.succeed(block)
|
from flow_workflow.commands.launch_base import LaunchWorkflowCommandBase
from flow.configuration.inject.broker import BrokerConfiguration
from flow.configuration.inject.redis_conf import RedisConfiguration
from flow.configuration.inject.service_locator import ServiceLocatorConfiguration
from twisted.internet import defer
class SubmitWorkflowCommand(LaunchWorkflowCommandBase):
injector_modules = [
BrokerConfiguration,
RedisConfiguration,
ServiceLocatorConfiguration,
]
local_workflow = False
def setup_services(self, net):
pass
def wait_for_results(self, net, block):
if block:
return self.setup_completion_handler(net)
else:
return defer.succeed(block)
|
Make submit workflow's local_workflow be False
|
Make submit workflow's local_workflow be False
|
Python
|
agpl-3.0
|
genome/flow-workflow,genome/flow-workflow,genome/flow-workflow
|
---
+++
@@ -12,7 +12,7 @@
ServiceLocatorConfiguration,
]
- local_workflow = True
+ local_workflow = False
def setup_services(self, net):
pass
|
9f3f9229718694570d7b56fe6cd63478f59f0de5
|
us_ignite/common/tests/utils.py
|
us_ignite/common/tests/utils.py
|
from django.core.urlresolvers import reverse
from django.contrib.messages.storage.base import BaseStorage, Message
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
class TestMessagesBackend(BaseStorage):
"""
When unit testing a django view the ``messages`` middleware
will be missing. This backend will provision a simple
messaging midleware.
Usage::
from django.test import client
from us_ignite.common.tests import utils
factory = client.RequestFactory()
request = factory.get('/')
request._messages = utils.TestMessagesBackend(request)
"""
def __init__(self, request, *args, **kwargs):
self._loaded_data = []
super(TestMessagesBackend, self).__init__(request, *args, **kwargs)
def add(self, level, message, extra_tags=''):
self._loaded_data.append(
Message(level, message, extra_tags=extra_tags))
|
from django.contrib.auth.models import User, AnonymousUser
from django.core.urlresolvers import reverse
from django.contrib.messages.storage.base import BaseStorage, Message
from django.test import client
from mock import Mock
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
def get_anon_mock():
"""Generate an anon user mock."""
return AnonymousUser()
def get_user_mock():
"""Generate an authed user mock."""
user = Mock(spec=User)
user.is_authenticated.return_value = True
return user
def get_request(method, *args, **kwargs):
"""Generatse a request with the given ``method``."""
user = kwargs.pop('user', None)
factory = client.RequestFactory()
method_action = getattr(factory, method)
request = method_action(*args, **kwargs)
if user:
request.user = user
return request
class TestMessagesBackend(BaseStorage):
"""
When unit testing a django view the ``messages`` middleware
will be missing. This backend will provision a simple
messaging midleware.
Usage::
from django.test import client
from us_ignite.common.tests import utils
factory = client.RequestFactory()
request = factory.get('/')
request._messages = utils.TestMessagesBackend(request)
"""
def __init__(self, request, *args, **kwargs):
self._loaded_data = []
super(TestMessagesBackend, self).__init__(request, *args, **kwargs)
def add(self, level, message, extra_tags=''):
self._loaded_data.append(
Message(level, message, extra_tags=extra_tags))
|
Add utilities to generate request doubles.
|
Add utilities to generate request doubles.
The new ``get_request`` function is a helper used to generate
request doubles the first argument is the ``method``, an
optional user can be attached to the request.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
---
+++
@@ -1,10 +1,38 @@
+from django.contrib.auth.models import User, AnonymousUser
from django.core.urlresolvers import reverse
from django.contrib.messages.storage.base import BaseStorage, Message
+
+from django.test import client
+
+from mock import Mock
def get_login_url(url):
"""Returns an expected login URL."""
return ('%s?next=%s' % (reverse('auth_login'), url))
+
+
+def get_anon_mock():
+ """Generate an anon user mock."""
+ return AnonymousUser()
+
+
+def get_user_mock():
+ """Generate an authed user mock."""
+ user = Mock(spec=User)
+ user.is_authenticated.return_value = True
+ return user
+
+
+def get_request(method, *args, **kwargs):
+ """Generatse a request with the given ``method``."""
+ user = kwargs.pop('user', None)
+ factory = client.RequestFactory()
+ method_action = getattr(factory, method)
+ request = method_action(*args, **kwargs)
+ if user:
+ request.user = user
+ return request
class TestMessagesBackend(BaseStorage):
|
38221a3d8df945981f9595842871b5dae6a68c0f
|
user_management/models/tests/factories.py
|
user_management/models/tests/factories.py
|
import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
password = factory.PostGenerationMethodCall('set_password', None)
|
import factory
from django.contrib.auth import get_user_model
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
is_active = True
@factory.post_generation
def password(self, create, extracted='default password', **kwargs):
self.raw_password = extracted
self.set_password(self.raw_password)
if create:
self.save()
|
Add raw_password to Users in tests
|
Add raw_password to Users in tests
|
Python
|
bsd-2-clause
|
incuna/django-user-management,incuna/django-user-management
|
---
+++
@@ -7,4 +7,11 @@
FACTORY_FOR = get_user_model()
name = factory.Sequence(lambda i: 'Test User {}'.format(i))
email = factory.Sequence(lambda i: 'email{}@example.com'.format(i))
- password = factory.PostGenerationMethodCall('set_password', None)
+ is_active = True
+
+ @factory.post_generation
+ def password(self, create, extracted='default password', **kwargs):
+ self.raw_password = extracted
+ self.set_password(self.raw_password)
+ if create:
+ self.save()
|
39d377c462ac031203877a2c13df79be41c68585
|
tests/fileutils-tests.py
|
tests/fileutils-tests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from nose.tools import raises
from spec_cleaner.fileutils import FileUtils
from spec_cleaner import RpmException
class TestFileutils(unittest.TestCase):
"""
We run few tests to ensure fileutils class works fine
"""
def setUp(self):
"""
Declare global scope variables for further use.
"""
self.fileutils = FileUtils()
@raises(RpmException)
def test_open_assertion(self):
self.fileutils.open('missing-file.txt', 'r')
@raises(RpmException)
def test_open_datafile_assertion(self):
self.fileutils.open_datafile('missing-file.txt')
def test_open(self):
self.fileutils.open('tests/fileutils-tests.py', 'r')
self.fileutils.close()
self.assertEqual(None, self.fileutils._file)
def test_open_datafile(self):
self.fileutils.open_datafile('excludes-bracketing.txt')
self.fileutils.close()
self.assertEqual(None, self.fileutils._file)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
from nose.tools import raises
from spec_cleaner.fileutils import FileUtils
from spec_cleaner import RpmException
class TestFileutils(unittest.TestCase):
"""
We run few tests to ensure fileutils class works fine
"""
def setUp(self):
"""
Declare global scope variables for further use.
"""
self.fileutils = FileUtils()
@raises(RpmException)
def test_open_assertion(self):
self.fileutils.open('missing-file.txt', 'r')
@raises(RpmException)
def test_open_datafile_assertion(self):
self.fileutils.open_datafile('missing-file.txt')
def test_open(self):
self.fileutils.open('tests/fileutils-tests.py', 'r')
self.fileutils.close()
self.assertEqual(None, self.fileutils.f)
def test_open_datafile(self):
self.fileutils.open_datafile('excludes-bracketing.txt')
self.fileutils.close()
self.assertEqual(None, self.fileutils.f)
|
Fix variable in testcase from previous commit change
|
Fix variable in testcase from previous commit change
|
Python
|
bsd-3-clause
|
plusky/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner,plusky/spec-cleaner
|
---
+++
@@ -30,9 +30,9 @@
def test_open(self):
self.fileutils.open('tests/fileutils-tests.py', 'r')
self.fileutils.close()
- self.assertEqual(None, self.fileutils._file)
+ self.assertEqual(None, self.fileutils.f)
def test_open_datafile(self):
self.fileutils.open_datafile('excludes-bracketing.txt')
self.fileutils.close()
- self.assertEqual(None, self.fileutils._file)
+ self.assertEqual(None, self.fileutils.f)
|
cbaab510c92566ffdcc7eb65af0ec9cf1320f173
|
recipes/webrtc.py
|
recipes/webrtc.py
|
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class WebRTC(recipe_util.Recipe):
"""Basic Recipe class for WebRTC."""
@staticmethod
def fetch_spec(props):
url = 'https://chromium.googlesource.com/external/webrtc.git'
spec = {
'solutions': [
{
'name': 'src',
'url': url,
'deps_file': 'DEPS',
'managed': False,
'custom_deps': {},
'safesync_url': '',
},
],
'auto': True, # Runs git auto-svn as a part of the fetch.
'with_branch_heads': True,
}
if props.get('target_os'):
spec['target_os'] = props['target_os'].split(',')
return {
'type': 'gclient_git_svn',
'gclient_git_svn_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return WebRTC().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
# Copyright (c) 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class WebRTC(recipe_util.Recipe):
"""Basic Recipe class for WebRTC."""
@staticmethod
def fetch_spec(props):
url = 'https://chromium.googlesource.com/external/webrtc.git'
spec = {
'solutions': [
{
'name': 'src',
'url': url,
'deps_file': 'DEPS',
'managed': False,
'custom_deps': {},
'safesync_url': '',
},
],
'with_branch_heads': True,
}
if props.get('target_os'):
spec['target_os'] = props['target_os'].split(',')
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return WebRTC().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Switch WebRTC recipe to Git.
|
Switch WebRTC recipe to Git.
BUG=412012
Review URL: https://codereview.chromium.org/765373002
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@294546 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
Python
|
bsd-3-clause
|
svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools
|
---
+++
@@ -26,7 +26,6 @@
'safesync_url': '',
},
],
- 'auto': True, # Runs git auto-svn as a part of the fetch.
'with_branch_heads': True,
}
@@ -34,8 +33,8 @@
spec['target_os'] = props['target_os'].split(',')
return {
- 'type': 'gclient_git_svn',
- 'gclient_git_svn_spec': spec,
+ 'type': 'gclient_git',
+ 'gclient_git_spec': spec,
}
@staticmethod
|
54ab41cb8c30ddd46154f23e89947286222616e1
|
raven/__init__.py
|
raven/__init__.py
|
"""
raven
~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
__all__ = ('VERSION', 'Client', 'load')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception as e:
VERSION = 'unknown'
from raven.base import * # NOQA
from raven.conf import * # NOQA
|
"""
raven
~~~~~
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import os.path
from raven.base import * # NOQA
from raven.conf import * # NOQA
__all__ = ('VERSION', 'Client', 'load', 'get_version')
try:
VERSION = __import__('pkg_resources') \
.get_distribution('raven').version
except Exception as e:
VERSION = 'unknown'
def _get_git_revision(path):
revision_file = os.path.join(path, 'refs', 'heads', 'master')
if not os.path.exists(revision_file):
return None
fh = open(revision_file, 'r')
try:
return fh.read().strip()[:7]
finally:
fh.close()
def get_revision():
"""
:returns: Revision number of this branch/checkout, if available. None if
no revision number can be determined.
"""
package_dir = os.path.dirname(__file__)
checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir, os.pardir))
path = os.path.join(checkout_dir, '.git')
if os.path.exists(path):
return _get_git_revision(path)
return None
def get_version():
base = VERSION
if __build__:
base = '%s (%s)' % (base, __build__)
return base
__build__ = get_revision()
__docformat__ = 'restructuredtext en'
|
Add git sha to version if available
|
Add git sha to version if available
|
Python
|
bsd-3-clause
|
getsentry/raven-python,icereval/raven-python,inspirehep/raven-python,inspirehep/raven-python,dbravender/raven-python,hzy/raven-python,getsentry/raven-python,jmagnusson/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,icereval/raven-python,ewdurbin/raven-python,smarkets/raven-python,johansteffner/raven-python,jmp0xf/raven-python,jmagnusson/raven-python,inspirehep/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jmagnusson/raven-python,ewdurbin/raven-python,ronaldevers/raven-python,jmp0xf/raven-python,someonehan/raven-python,smarkets/raven-python,icereval/raven-python,jbarbuto/raven-python,danriti/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,arthurlogilab/raven-python,smarkets/raven-python,percipient/raven-python,hzy/raven-python,danriti/raven-python,dbravender/raven-python,ewdurbin/raven-python,recht/raven-python,akheron/raven-python,Photonomie/raven-python,johansteffner/raven-python,arthurlogilab/raven-python,dbravender/raven-python,recht/raven-python,lepture/raven-python,danriti/raven-python,lepture/raven-python,ronaldevers/raven-python,arthurlogilab/raven-python,Photonomie/raven-python,nikolas/raven-python,akheron/raven-python,icereval/raven-python,johansteffner/raven-python,inspirehep/raven-python,Photonomie/raven-python,jbarbuto/raven-python,nikolas/raven-python,akalipetis/raven-python,akheron/raven-python,percipient/raven-python,someonehan/raven-python,jbarbuto/raven-python,getsentry/raven-python,someonehan/raven-python,ronaldevers/raven-python,lepture/raven-python,arthurlogilab/raven-python,akalipetis/raven-python,hzy/raven-python,jmp0xf/raven-python,smarkets/raven-python,nikolas/raven-python,akalipetis/raven-python,recht/raven-python,Goldmund-Wyldebeast-Wunderliebe/raven-python,jbarbuto/raven-python,nikolas/raven-python,percipient/raven-python
|
---
+++
@@ -6,7 +6,14 @@
:license: BSD, see LICENSE for more details.
"""
-__all__ = ('VERSION', 'Client', 'load')
+
+import os
+import os.path
+from raven.base import * # NOQA
+from raven.conf import * # NOQA
+
+
+__all__ = ('VERSION', 'Client', 'load', 'get_version')
try:
VERSION = __import__('pkg_resources') \
@@ -14,5 +21,36 @@
except Exception as e:
VERSION = 'unknown'
-from raven.base import * # NOQA
-from raven.conf import * # NOQA
+
+def _get_git_revision(path):
+ revision_file = os.path.join(path, 'refs', 'heads', 'master')
+ if not os.path.exists(revision_file):
+ return None
+ fh = open(revision_file, 'r')
+ try:
+ return fh.read().strip()[:7]
+ finally:
+ fh.close()
+
+
+def get_revision():
+ """
+ :returns: Revision number of this branch/checkout, if available. None if
+ no revision number can be determined.
+ """
+ package_dir = os.path.dirname(__file__)
+ checkout_dir = os.path.normpath(os.path.join(package_dir, os.pardir, os.pardir))
+ path = os.path.join(checkout_dir, '.git')
+ if os.path.exists(path):
+ return _get_git_revision(path)
+ return None
+
+
+def get_version():
+ base = VERSION
+ if __build__:
+ base = '%s (%s)' % (base, __build__)
+ return base
+
+__build__ = get_revision()
+__docformat__ = 'restructuredtext en'
|
b58caeae59a5ab363b4b6b40cbd19004b40dd206
|
calvin/actorstore/systemactors/sensor/Distance.py
|
calvin/actorstore/systemactors/sensor/Distance.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, manage, condition, stateguard
class Distance(Actor):
"""
Measure distance. Takes the frequency of measurements, in Hz, as input.
Outputs:
meters : Measured distance, in meters
"""
@manage(['frequency'])
def init(self, frequency):
self.frequency = frequency
self.setup()
def setup(self):
self.use("calvinsys.sensors.distance", shorthand="distance")
self['distance'].start(self.frequency)
def will_migrate(self):
self['distance'].stop()
def did_migrate(self):
self.setup()
@stateguard(lambda self: self['distance'].has_data())
@condition([], ['meters'])
def measure(self):
distance = self['distance'].read()
return (distance,)
action_priority = (measure,)
requires = ['calvinsys.sensors.distance']
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.actor.actor import Actor, manage, condition, stateguard
class Distance(Actor):
"""
Measure distance. Takes the frequency of measurements, in Hz, as input.
Outputs:
meters : Measured distance, in meters
"""
@manage(['frequency'])
def init(self, frequency):
self.frequency = frequency
self.setup()
def setup(self):
self.use("calvinsys.sensors.distance", shorthand="distance")
self['distance'].start(self.frequency)
def will_migrate(self):
self['distance'].stop()
def did_migrate(self):
self.setup()
def will_end(self):
self['distance'].stop()
@stateguard(lambda self: self['distance'].has_data())
@condition([], ['meters'])
def measure(self):
distance = self['distance'].read()
return (distance,)
action_priority = (measure,)
requires = ['calvinsys.sensors.distance']
|
Fix a bug where periodic timers weren't removed when application was stopped. FIXME: Check up on the semantics of will/did_start, will_migrate, did_migrate, will_stop.
|
Fix a bug where periodic timers weren't removed when application was stopped.
FIXME: Check up on the semantics of will/did_start, will_migrate, did_migrate, will_stop.
|
Python
|
apache-2.0
|
EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base
|
---
+++
@@ -41,6 +41,10 @@
def did_migrate(self):
self.setup()
+ def will_end(self):
+ self['distance'].stop()
+
+
@stateguard(lambda self: self['distance'].has_data())
@condition([], ['meters'])
def measure(self):
|
3f88f20a8855dd38ef53b270621d15bf0df4d62c
|
scavenger/main.py
|
scavenger/main.py
|
import logging
from time import time, sleep
from .net_utils import *
logger = logging.getLogger(__name__)
def target_scaner(interface, min_interval=30):
old_peers = {}
while True:
begin_time = time()
peers = {}
for ip, mac in arp_scaner():
peers[ip] = mac
if ip in old_peers: # Still online
del old_peers[ip] # Remove it from old_peers
# Now targets in old_peers
logger.info("%d target(s) found", len(old_peers))
for target in old_peers.items():
yield target
old_peers = peers
# Wait for next scanning, if needed
interval = time() - begin_time
if interval < min_interval:
sleep(min_interval - interval)
def try_target(target, interface, max_attempts=5, sleep_time=5):
ip, mac = target
logger.info("Trying target: %s, %s", ip, mac)
if not spoof_mac(mac, interface): # Failed to spoof mac
return False
for i in range(max_attempts):
sleep(sleep_time)
if get_ip() == ip:
break
else: # Failed to get this IP
return False
infos = check_online()
if infos: # Succeeded
return infos
else: # An offline IP
return False
|
import logging
from time import time, sleep
from .net_utils import *
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def target_scaner(interface, min_interval=30):
old_peers = {}
while True:
begin_time = time()
peers = {}
for ip, mac in arp_scaner():
peers[ip] = mac
if ip in old_peers: # Still online
del old_peers[ip] # Remove it from old_peers
# Now targets in old_peers
logger.info("%d target(s) found", len(old_peers))
for target in old_peers.items():
yield target
old_peers = peers
# Wait for next scanning, if needed
interval = time() - begin_time
if interval < min_interval:
sleep(min_interval - interval)
def try_target(target, interface, max_attempts=5, sleep_time=5):
ip, mac = target
logger.info("Trying target: %s, %s", ip, mac)
if not spoof_mac(mac, interface): # Failed to spoof mac
return False
for i in range(max_attempts):
sleep(sleep_time)
if get_ip() == ip:
break
else: # Failed to get this IP
return False
infos = check_online()
if infos: # Succeeded
return infos
else: # An offline IP
return False
|
Set logging level to INFO
|
Set logging level to INFO
|
Python
|
mit
|
ThomasLee969/scavenger
|
---
+++
@@ -4,6 +4,7 @@
from .net_utils import *
logger = logging.getLogger(__name__)
+logging.basicConfig(level=logging.INFO)
def target_scaner(interface, min_interval=30):
old_peers = {}
|
f48cb4fd946c8fa4b6157b8e1ea9ad8b385bc67a
|
src/hades/bin/generate_config.py
|
src/hades/bin/generate_config.py
|
import os
import sys
from hades import constants
from hades.common.cli import ArgumentParser, parser as common_parser
from hades.config.generate import ConfigGenerator
from hades.config.loader import load_config
def main():
parser = ArgumentParser(parents=[common_parser])
parser.add_argument(dest='source', metavar='SOURCE',
help="Template file name or template directory name")
parser.add_argument(dest='destination', metavar='DESTINATION', nargs='?',
help="Destination file or directory (default is stdout"
"for files; required for directories)")
args = parser.parse_args()
config = load_config(args.config)
template_dir = constants.templatedir
generator = ConfigGenerator(template_dir, config)
source_path = os.path.join(template_dir, args.source)
if os.path.isdir(source_path):
generator.from_directory(args.source, args.destination)
elif os.path.isfile(source_path):
if args.destination is None:
generator.from_file(args.source, sys.stdout)
else:
with open(args.destination, 'w', encoding='utf-8') as f:
generator.from_file(args.source, f)
else:
print("No such file or directory {} in {}".format(args.source,
template_dir),
file=sys.stderr)
return os.EX_NOINPUT
if __name__ == '__main__':
sys.exit(main())
|
import logging
import os
import sys
from hades import constants
from hades.common.cli import ArgumentParser, parser as common_parser
from hades.config.generate import ConfigGenerator
from hades.config.loader import load_config
logger = logging.getLogger()
def main():
parser = ArgumentParser(parents=[common_parser])
parser.add_argument(dest='source', metavar='SOURCE',
help="Template file name or template directory name")
parser.add_argument(dest='destination', metavar='DESTINATION', nargs='?',
help="Destination file or directory (default is stdout"
"for files; required for directories)")
args = parser.parse_args()
config = load_config(args.config)
template_dir = constants.templatedir
generator = ConfigGenerator(template_dir, config)
source_path = os.path.join(template_dir, args.source)
if os.path.isdir(source_path):
generator.from_directory(args.source, args.destination)
elif os.path.isfile(source_path):
if args.destination is None:
generator.from_file(args.source, sys.stdout)
else:
with open(args.destination, 'w', encoding='utf-8') as f:
generator.from_file(args.source, f)
else:
logger.critical("No such file or directory {} in {}"
.format(args.source, template_dir))
return os.EX_NOINPUT
if __name__ == '__main__':
sys.exit(main())
|
Use logger for hades-generate-config error messages
|
Use logger for hades-generate-config error messages
|
Python
|
mit
|
agdsn/hades,agdsn/hades,agdsn/hades,agdsn/hades,agdsn/hades
|
---
+++
@@ -1,3 +1,4 @@
+import logging
import os
import sys
@@ -5,6 +6,8 @@
from hades.common.cli import ArgumentParser, parser as common_parser
from hades.config.generate import ConfigGenerator
from hades.config.loader import load_config
+
+logger = logging.getLogger()
def main():
@@ -28,9 +31,8 @@
with open(args.destination, 'w', encoding='utf-8') as f:
generator.from_file(args.source, f)
else:
- print("No such file or directory {} in {}".format(args.source,
- template_dir),
- file=sys.stderr)
+ logger.critical("No such file or directory {} in {}"
+ .format(args.source, template_dir))
return os.EX_NOINPUT
|
295200dba8569a7a667de46a13c6e36ca757f5d5
|
quickstart/python/understand/example-2/create_joke_intent.6.x.py
|
quickstart/python/understand/example-2/create_joke_intent.6.x.py
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Create a new intent named 'tell_a_joke'
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/assistant/list
intent = client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents \
.create(unique_name='tell-a-joke')
# Provide actions for the new intent
joke_actions = {
'actions': [
{'say': 'I was going to look for my missing watch, but I could never find the time.'}
]
}
# Update the tell-a-joke intent to use this 'say' action.
client.preview.understand \
.assistants(assistant_sid) \
.intents(intent.sid) \
.intent_actions().update(joke_actions)
print(intent.sid)
|
# Download the helper library from https://www.twilio.com/docs/python/install
from twilio.rest import Client
# Your Account Sid and Auth Token from twilio.com/console
account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX'
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
# Create a new task named 'tell_a_joke'
# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
intent = client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents \
.create(unique_name='tell-a-joke')
# Provide actions for the new task
joke_actions = {
'actions': [
{'say': 'I was going to look for my missing watch, but I could never find the time.'}
]
}
# Update the tell-a-joke task to use this 'say' action.
client.preview.understand \
.assistants(assistant_sid) \
.intents(intent.sid) \
.intent_actions().update(joke_actions)
print(intent.sid)
|
Update console link, intent-->task in comments
|
Update console link, intent-->task in comments
|
Python
|
mit
|
TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets
|
---
+++
@@ -6,21 +6,21 @@
auth_token = 'your_auth_token'
client = Client(account_sid, auth_token)
-# Create a new intent named 'tell_a_joke'
-# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/assistant/list
+# Create a new task named 'tell_a_joke'
+# Replace 'UAXXX...' with your Assistant's unique SID https://www.twilio.com/console/autopilot/list
intent = client.preview.understand \
.assistants('UAXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
.intents \
.create(unique_name='tell-a-joke')
-# Provide actions for the new intent
+# Provide actions for the new task
joke_actions = {
'actions': [
{'say': 'I was going to look for my missing watch, but I could never find the time.'}
]
}
-# Update the tell-a-joke intent to use this 'say' action.
+# Update the tell-a-joke task to use this 'say' action.
client.preview.understand \
.assistants(assistant_sid) \
.intents(intent.sid) \
|
3df59e18a98be4da2ca2ed2d40a869d65696fe9b
|
src/helpers/utils.py
|
src/helpers/utils.py
|
from __future__ import unicode_literals
from config import settings
import os
import re
import string
import pytz
def clear_screen():
# Clear screen
os.system(['clear', 'cls'][os.name == 'nt'])
def print_obj(obj):
for attr, val in obj.__dict__.iteritems():
print "{0}: {1}".format(attr, val)
def convert_to_snake_case(name):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def parse_int(text):
digits = ''.join([c for c in text if c.isdigit()])
if digits.isdigit():
return int(digits)
return None
def as_client_tz(dt):
return dt.astimezone(pytz.timezone(settings.CLIENT_TZ))
def make_data_updates_string(data_changes):
updates = ""
if data_changes:
for key in data_changes:
title = key.replace("_", " ").capitalize()
old_val = data_changes[key]['old']
new_val = data_changes[key]['new']
updates += "\n{0}: {1} => {2}".format(title, old_val, new_val)
return updates
def delta_minutes(now, before):
delta_datetime = now - before
minutes_ago = delta_datetime.seconds / 60
return minutes_ago
|
from __future__ import unicode_literals
from config import settings
import os
import re
import string
import pytz
def clear_screen():
# Clear screen
os.system(['clear', 'cls'][os.name == 'nt'])
def print_obj(obj):
for attr, val in obj.__dict__.iteritems():
print "{0}: {1}".format(attr, val)
def convert_to_snake_case(name):
s1 = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', name)
return re.sub('([a-z0-9])([A-Z])', r'\1_\2', s1).lower()
def parse_int(text):
digits = ''.join([c for c in text if c.isdigit()])
if digits.isdigit():
return int(digits)
return None
def as_client_tz(dt):
return dt.astimezone(pytz.timezone(settings.CLIENT_TZ))
def make_data_updates_string(data_changes):
updates = ""
if data_changes:
for key in data_changes:
title = key.replace("_", " ").capitalize()
old_val = data_changes[key]['old']
new_val = data_changes[key]['new']
updates += "\n{0}: {1} => {2}".format(title, old_val, new_val)
return updates
def delta_minutes(now, before):
delta_datetime = now - before
minutes_ago = delta_datetime.total_seconds() / 60
return minutes_ago
|
Fix error in delta_minutes, use timedelta.total_seconds()/60 instead of timedelta.seconds/60.
|
Fix error in delta_minutes, use timedelta.total_seconds()/60 instead of timedelta.seconds/60.
|
Python
|
mit
|
lesh1k/VKStalk
|
---
+++
@@ -48,5 +48,5 @@
def delta_minutes(now, before):
delta_datetime = now - before
- minutes_ago = delta_datetime.seconds / 60
+ minutes_ago = delta_datetime.total_seconds() / 60
return minutes_ago
|
e7962ec34a86fe464862ab81ec8cc8da14a732f6
|
models/weather.py
|
models/weather.py
|
from configparser import ConfigParser
from googletrans import Translator
import pyowm
import logging
import re
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
cfg = ConfigParser()
cfg.read('config')
api_key = cfg.get('auth', 'owm_api_key')
def weather(bot, update):
owm = pyowm.OWM(api_key, language='zh_tw')
translator = Translator()
location = update.message.text
location = re.sub(u'天氣 ','',location)
trans = translator.translate(location).text
logger.info("get weather at %s" %trans)
try:
obs = owm.weather_at_place(trans)
w = obs.get_weather()
update.message.reply_text(location+'的天氣\n'+'溫度:'+str(w.get_temperature(unit='celsius')['temp'])+
' 濕度:'+str(w.get_humidity()))
except:
update.message.reply_text('Location fucking not found')
|
from configparser import ConfigParser
from googletrans import Translator
import pyowm
import logging
import re
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
cfg = ConfigParser()
cfg.read('config')
api_key = cfg.get('auth', 'owm_api_key')
def weather(bot, update):
owm = pyowm.OWM(api_key, language='zh_tw')
translator = Translator()
location = update.message.text
location = re.sub(u'天氣 ','',location)
trans = translator.translate(location).text
logger.info("get weather at %s" %trans)
try:
obs = owm.weather_at_place(trans)
w = obs.get_weather()
update.message.reply_text(location+'的天氣\n'+'溫度:'+str(w.get_temperature(unit='celsius')['temp'])+
' 濕度:'+str(w.get_humidity())+' 天氣狀況:'+str(w.get_status()))
except:
update.message.reply_text('Location fucking not found')
|
Add Info to Weather Check
|
Add Info to Weather Check
Add current weather status to weather check features.
|
Python
|
mit
|
iGene/igene_bot
|
---
+++
@@ -21,6 +21,6 @@
obs = owm.weather_at_place(trans)
w = obs.get_weather()
update.message.reply_text(location+'的天氣\n'+'溫度:'+str(w.get_temperature(unit='celsius')['temp'])+
- ' 濕度:'+str(w.get_humidity()))
+ ' 濕度:'+str(w.get_humidity())+' 天氣狀況:'+str(w.get_status()))
except:
update.message.reply_text('Location fucking not found')
|
6cdb6ada40f28ecc7629ecdb5dd67c06f6013e88
|
router_monitor.py
|
router_monitor.py
|
import schedule, time, os, sys, urllib2, base64
from datetime import datetime
def check_connection():
sys.stdout.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S") + " Connection")
hostname = "google.com"
response = os.system("ping -c 1 " + hostname + " > /dev/null")
if response == 0:
print " is up"
else:
print " is down!"
def restart_router():
request = urllib2.Request("http://192.18.0.1/sky_rebootCPE.html")
base = base64.encodestring("%s:%s" % ("admin", "sky")).replace("\n","")
request.add_header("Authorization", "Basic %s" % base)
result = urllib2.urlopen(request)
print sys.version
print "starting"
check_connection()
schedule.every(1).minutes.do(check_connection)
while 1:
schedule.run_pending()
time.sleep(1)
|
# python -u router_monitor.py >> router_log.txt 2>&1 &
import schedule, time, os, sys, urllib2, base64
from datetime import datetime
def check_connection():
sys.stdout.write(datetime.now().strftime("%Y-%m-%d %H:%M:%S") + " Connection")
hostname = "google.com"
response = os.system("ping -c 1 " + hostname + " > /dev/null")
if response == 0:
print " is up"
else:
print " is down!"
def restart_router():
request = urllib2.Request("http://192.18.0.1/sky_rebootCPE.html")
base = base64.encodestring("%s:%s" % ("admin", "sky")).replace("\n","")
request.add_header("Authorization", "Basic %s" % base)
result = urllib2.urlopen(request)
print "starting"
check_connection()
schedule.every(1).minutes.do(check_connection)
while 1:
schedule.run_pending()
time.sleep(1)
|
Add usage comment and remove python version
|
Add usage comment and remove python version
|
Python
|
mit
|
danic85/router_monitor
|
---
+++
@@ -1,3 +1,4 @@
+# python -u router_monitor.py >> router_log.txt 2>&1 &
import schedule, time, os, sys, urllib2, base64
from datetime import datetime
def check_connection():
@@ -15,7 +16,6 @@
request.add_header("Authorization", "Basic %s" % base)
result = urllib2.urlopen(request)
-print sys.version
print "starting"
check_connection()
schedule.every(1).minutes.do(check_connection)
|
779639680e02fb4de920a95558eceecc2a55ab46
|
indra/bel/ndex_client.py
|
indra/bel/ndex_client.py
|
import requests
import json
import time
#ndex_base_url = 'http://services.bigmech.ndexbio.org'
ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
import requests
import json
import time
ndex_base_url = 'http://general.bigmech.ndexbio.org:8082'
#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
# If response is immediate, we get 200
if status == 200:
return res.text
# If there is a continuation of the message
# we get status 300, handled below.
# Otherwise we return None.
elif status != 300:
return None
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
Add new URL for NDEx service
|
Add new URL for NDEx service
|
Python
|
bsd-2-clause
|
jmuhlich/indra,johnbachman/belpy,sorgerlab/indra,jmuhlich/indra,sorgerlab/belpy,bgyori/indra,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/indra,sorgerlab/indra,pvtodorov/indra,jmuhlich/indra,pvtodorov/indra,bgyori/indra
|
---
+++
@@ -2,8 +2,8 @@
import json
import time
-#ndex_base_url = 'http://services.bigmech.ndexbio.org'
-ndex_base_url = 'http://52.37.175.128'
+ndex_base_url = 'http://general.bigmech.ndexbio.org:8082'
+#ndex_base_url = 'http://52.37.175.128'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
|
a2005d98cd81424a2a43277cae0595806751e9dc
|
swh/web/ui/controller/service.py
|
swh/web/ui/controller/service.py
|
# Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.ui.back import http, api_query
from swh.core.json import SWHJSONDecoder
import json
def search(base_url, hashes):
"""Search a content with given hashes.
Args:
hashes, dictionary of hash indexed by key, sha1, sha256, etc...
Returns:
None if no content is found.
An enriched content if the content is found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
if output:
h_res = json.loads(output, cls=SWHJSONDecoder)
if h_res:
return h_res['found']
return None
return False
return False
q = api_query.api_storage_content_present({'content': hashes})
return http.execute(base_url, q, result_fn=unserialize_result)
|
# Copyright (C) 2015 The Software Heritage developers
# See the AUTHORS file at the top-level directory of this distribution
# License: GNU General Public License version 3, or any later version
# See top-level LICENSE file for more information
from swh.web.ui.back import http, api_query
from swh.core.json import SWHJSONDecoder
import json
def search(base_url, hashes):
"""Search a content with given hashes.
Args:
hashes, dictionary of hash indexed by key, sha1, sha256, etc...
Returns:
None if no content is found.
An enriched content if the content is found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
return json.loads(output, cls=SWHJSONDecoder) if output else False
return False
q = api_query.api_storage_content_present({'content': hashes})
return http.execute(base_url, q, result_fn=unserialize_result)
|
Refactor - Simplify the call to api
|
Refactor - Simplify the call to api
|
Python
|
agpl-3.0
|
SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui
|
---
+++
@@ -26,12 +26,7 @@
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
- if output:
- h_res = json.loads(output, cls=SWHJSONDecoder)
- if h_res:
- return h_res['found']
- return None
- return False
+ return json.loads(output, cls=SWHJSONDecoder) if output else False
return False
q = api_query.api_storage_content_present({'content': hashes})
|
3db5c62d116399a0b26e8c68d3e4274d5a6bd17b
|
django_makemessages_xgettext/management/commands/makemessagesxgettext.py
|
django_makemessages_xgettext/management/commands/makemessagesxgettext.py
|
import django
if django.get_version().startswith("1.8"):
from django_makemessages_xgettext import django18_makemessagesxgettext
Command = django18_makemessagesxgettext.Command
else:
from django_makemessages_xgettext import django17_makemessagesxgettext
Command = django17_makemessagesxgettext.Command
|
import django
if django.get_version().startswith("1.7"):
from django_makemessages_xgettext import django17_makemessagesxgettext
Command = django17_makemessagesxgettext.Command
else:
from django_makemessages_xgettext import django18_makemessagesxgettext
Command = django18_makemessagesxgettext.Command
|
Support for django 1.9 and beyond!
|
Support for django 1.9 and beyond!
|
Python
|
bsd-3-clause
|
resulto-admin/django-makemessages-xgettext
|
---
+++
@@ -1,9 +1,8 @@
import django
-if django.get_version().startswith("1.8"):
+if django.get_version().startswith("1.7"):
+ from django_makemessages_xgettext import django17_makemessagesxgettext
+ Command = django17_makemessagesxgettext.Command
+else:
from django_makemessages_xgettext import django18_makemessagesxgettext
Command = django18_makemessagesxgettext.Command
-
-else:
- from django_makemessages_xgettext import django17_makemessagesxgettext
- Command = django17_makemessagesxgettext.Command
|
50926966919fa5cf140e3f30815f86c93189cc49
|
percy/utils.py
|
percy/utils.py
|
from __future__ import print_function
import sys
import hashlib
import base64
def print_error(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def to_unicode(content):
# TODO: considering using the 'six' library for this, for now just do something simple.
if sys.version_info >= (3,0):
return str(content)
elif sys.version_info < (3,0):
return unicode(content)
def sha256hash(content):
if _is_unicode(content):
content = content.encode('utf-8')
return hashlib.sha256(content).hexdigest()
def base64encode(content):
if _is_unicode(content):
content = content.encode('utf-8')
return base64.b64encode(content)
def _is_unicode(content):
if (sys.version_info >= (3,0) and isinstance(content, str)
or sys.version_info < (3,0) and isinstance(content, unicode)):
return True
return False
|
from __future__ import print_function
import sys
import hashlib
import base64
def print_error(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
def to_unicode(content):
# TODO: considering using the 'six' library for this, for now just do something simple.
if sys.version_info >= (3,0):
return str(content)
elif sys.version_info < (3,0):
return unicode(content)
def sha256hash(content):
if _is_unicode(content):
content = content.encode('utf-8')
return hashlib.sha256(content).hexdigest()
def base64encode(content):
if _is_unicode(content):
content = content.encode('utf-8')
return to_unicode(base64.b64encode(content))
def _is_unicode(content):
if (sys.version_info >= (3,0) and isinstance(content, str)
or sys.version_info < (3,0) and isinstance(content, unicode)):
return True
return False
|
Return serializable string from base64encode.
|
Return serializable string from base64encode.
|
Python
|
mit
|
percy/python-percy-client
|
---
+++
@@ -21,7 +21,7 @@
def base64encode(content):
if _is_unicode(content):
content = content.encode('utf-8')
- return base64.b64encode(content)
+ return to_unicode(base64.b64encode(content))
def _is_unicode(content):
if (sys.version_info >= (3,0) and isinstance(content, str)
|
c20fd19d10a77cda0c16fd5a4fa329a43933751f
|
src/commoner/profiles/models.py
|
src/commoner/profiles/models.py
|
from django.db import models
from django.db.models import permalink
from django.contrib.auth.models import User
class Content(models.Model):
commoner = models.ForeignKey(User)
title = models.CharField(max_length=255, blank=True)
url = models.URLField(max_length=255, blank=False)
def __unicode__(self):
return self.title or self.url
@permalink
def get_absolute_url(self):
return ('profiles.views.content_detail', (self.id,))
class CommonerProfile(models.Model):
user = models.ForeignKey(User, unique=True)
nickname = models.CharField(max_length=255, blank=True)
homepage = models.URLField(max_length=255, blank=True)
location = models.CharField(max_length=255, blank=True)
def __unicode__(self):
if self.nickname:
return u"%s (%s)" % (self.user.username, self.nickname)
return self.user.username
@permalink
def get_absolute_url(self):
return ('commoner.profiles.views.profile_view', (str(self.user),))
|
import urlparse
from django.db import models
from django.db.models import permalink
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from commoner.util import getBaseURL
class Content(models.Model):
commoner = models.ForeignKey(User)
title = models.CharField(max_length=255, blank=True)
url = models.URLField(max_length=255, blank=False)
def __unicode__(self):
return self.title or self.url
@permalink
def get_absolute_url(self):
return ('profiles.views.content_detail', (self.id,))
class CommonerProfile(models.Model):
user = models.ForeignKey(User, unique=True)
nickname = models.CharField(max_length=255, blank=True)
homepage = models.URLField(max_length=255, blank=True)
location = models.CharField(max_length=255, blank=True)
def __unicode__(self):
if self.nickname:
return u"%s (%s)" % (self.user.username, self.nickname)
return self.user.username
def display_name(self):
return self.nickname or self.user.username
def get_absolute_url(self, request=None):
if request is None:
return reverse('profile_view', args=(self.user.username, ) )
else:
return urlparse.urljoin(
getBaseURL(request),
reverse('profile_view', args=(self.user.username, ) )
)
|
Support generating a completely absolute URL for the profile; added a display_name method to the Profile model.
|
Support generating a completely absolute URL for the profile; added a
display_name method to the Profile model.
|
Python
|
agpl-3.0
|
cc-archive/commoner,cc-archive/commoner
|
---
+++
@@ -1,6 +1,11 @@
+import urlparse
+
from django.db import models
from django.db.models import permalink
+from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
+
+from commoner.util import getBaseURL
class Content(models.Model):
@@ -29,7 +34,16 @@
return u"%s (%s)" % (self.user.username, self.nickname)
return self.user.username
- @permalink
- def get_absolute_url(self):
- return ('commoner.profiles.views.profile_view', (str(self.user),))
+ def display_name(self):
+ return self.nickname or self.user.username
+ def get_absolute_url(self, request=None):
+ if request is None:
+ return reverse('profile_view', args=(self.user.username, ) )
+ else:
+ return urlparse.urljoin(
+ getBaseURL(request),
+ reverse('profile_view', args=(self.user.username, ) )
+ )
+
+
|
24e2e391dece37c245d8a459456f3e30cd2346a8
|
openxc/vehicle.py
|
openxc/vehicle.py
|
from .measurements import Measurement
from .sinks.base import MeasurementNotifierSink
class Vehicle(object):
def __init__(self, source=None):
self.sources = set()
self.sinks = set()
self.measurements = {}
self.add_source(source)
self.notifier = MeasurementNotifierSink()
self.sinks.add(self.notifier)
def get(self, measurement_class):
name = Measurement.name_from_class(measurement_class)
return self._construct_measurement(name)
def listen(self, measurement_class, listener):
self.notifier.register(measurement_class, listener)
def unlisten(self, measurement_class, listener):
self.notifier.unregister(measurement_class, listener)
def _receive(self, message, **kwargs):
name = message['name']
self.measurements[name] = message
for sink in self.sinks:
sink.receive(message, **kwargs)
def _construct_measurement(self, measurement_id):
raw_measurement = self.measurements.get(measurement_id, None)
if raw_measurement is not None:
return Measurement.from_dict(raw_measurement)
def add_source(self, source):
if source is not None:
self.sources.add(source)
source.callback = self._receive
source.start()
def add_sink(self, sink):
if sink is not None:
self.sinks.add(sink)
if hasattr(sink, 'start'):
sink.start()
|
from .measurements import Measurement
from .sinks.base import MeasurementNotifierSink
class Vehicle(object):
def __init__(self, interface=None):
self.sources = set()
self.sinks = set()
self.measurements = {}
if interface is not None:
self.add_source(interface)
self.controller = interface
self.notifier = MeasurementNotifierSink()
self.sinks.add(self.notifier)
def get(self, measurement_class):
name = Measurement.name_from_class(measurement_class)
return self._construct_measurement(name)
def listen(self, measurement_class, listener):
self.notifier.register(measurement_class, listener)
def unlisten(self, measurement_class, listener):
self.notifier.unregister(measurement_class, listener)
def _receive(self, message, **kwargs):
name = message['name']
self.measurements[name] = message
for sink in self.sinks:
sink.receive(message, **kwargs)
def _construct_measurement(self, measurement_id):
raw_measurement = self.measurements.get(measurement_id, None)
if raw_measurement is not None:
return Measurement.from_dict(raw_measurement)
def add_source(self, source):
if source is not None:
self.sources.add(source)
source.callback = self._receive
source.start()
def add_sink(self, sink):
if sink is not None:
self.sinks.add(sink)
if hasattr(sink, 'start'):
sink.start()
|
Change constructor of Vehicle to accept an Interface instead of just Source.
|
Change constructor of Vehicle to accept an Interface instead of just Source.
|
Python
|
bsd-3-clause
|
openxc/openxc-python,openxc/openxc-python,openxc/openxc-python
|
---
+++
@@ -3,11 +3,14 @@
class Vehicle(object):
- def __init__(self, source=None):
+ def __init__(self, interface=None):
self.sources = set()
self.sinks = set()
self.measurements = {}
- self.add_source(source)
+
+ if interface is not None:
+ self.add_source(interface)
+ self.controller = interface
self.notifier = MeasurementNotifierSink()
self.sinks.add(self.notifier)
|
ffd429281ed6695457304646467a6d9e0a0301a4
|
src/nyc_trees/apps/core/tasks.py
|
src/nyc_trees/apps/core/tasks.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from celery import task
from django.core.files.storage import default_storage
@task(bind=True, max_retries=15, default_retry_delay=2)
def wait_for_default_storage_file(self, filename):
if default_storage.exists(filename):
return filename
else:
self.retry()
|
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from celery import task
from django.core.files.storage import default_storage
@task(bind=True, max_retries=15, default_retry_delay=2)
def wait_for_default_storage_file(self, filename):
if default_storage.exists(filename):
return filename
elif self.request.retries < self.max_retries:
self.retry()
else:
return None
|
Send RSVP email even if PDF doesn't exist
|
Send RSVP email even if PDF doesn't exist
If the PDF can't be found on the disk after the maximum number of
retries, return None, instead of raising an exception. This ensures that
the RSVP email notification still sends even without the PDF attachment.
Refs #1655
|
Python
|
agpl-3.0
|
azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,kdeloach/nyc-trees,azavea/nyc-trees,maurizi/nyc-trees,kdeloach/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees,kdeloach/nyc-trees,maurizi/nyc-trees,azavea/nyc-trees
|
---
+++
@@ -13,5 +13,7 @@
def wait_for_default_storage_file(self, filename):
if default_storage.exists(filename):
return filename
+ elif self.request.retries < self.max_retries:
+ self.retry()
else:
- self.retry()
+ return None
|
3bad6c23fad5525628db1c3c1b99f3f86c08db63
|
cloudmeta/apps/metadata/models.py
|
cloudmeta/apps/metadata/models.py
|
from django.db import models
KEYTYPE_CHOICES = (
('RSA', 'ssh-rsa'),
('DSA', 'ssh-dsa'),
('ECC-256', 'ecdsa-sha2-nistp256'),
('ECC-521', 'ecdsa-sha2-nistp521'),
)
class Node(models.Model):
name = models.CharField(unique=True, primary_key=True, max_length=256)
hostname = models.CharField(blank=True, max_length=256)
public_keys = models.ManyToManyField('OpensshKey')
def __unicode__(self):
return self.name
class OpensshKey(models.Model):
name = models.CharField(unique=True, max_length=256)
keytype = models.CharField(max_length=6, choices=KEYTYPE_CHOICES)
key = models.TextField()
host = models.CharField(max_length=256, blank=True)
def __unicode__(self):
return self.name
|
from django.db import models
KEYTYPE_CHOICES = (
('RSA', 'ssh-rsa'),
('DSA', 'ssh-dsa'),
('ECC-256', 'ecdsa-sha2-nistp256'),
('ECC-384', 'ecdsa-sha2-nistp384'),
('ECC-521', 'ecdsa-sha2-nistp521'),
)
class Node(models.Model):
name = models.CharField(unique=True, primary_key=True, max_length=256)
hostname = models.CharField(blank=True, max_length=256)
public_keys = models.ManyToManyField('OpensshKey')
def __unicode__(self):
return self.name
class OpensshKey(models.Model):
name = models.CharField(unique=True, max_length=256)
keytype = models.CharField(max_length=6, choices=KEYTYPE_CHOICES)
key = models.TextField()
host = models.CharField(max_length=256, blank=True)
def __unicode__(self):
return self.name
|
Allow ecdsa 384 bit keys too
|
Allow ecdsa 384 bit keys too
|
Python
|
agpl-3.0
|
bencord0/cloudmeta,bencord0/cloudmeta
|
---
+++
@@ -4,6 +4,7 @@
('RSA', 'ssh-rsa'),
('DSA', 'ssh-dsa'),
('ECC-256', 'ecdsa-sha2-nistp256'),
+ ('ECC-384', 'ecdsa-sha2-nistp384'),
('ECC-521', 'ecdsa-sha2-nistp521'),
)
|
6932164f20ced80ff6d08402b84aba954a983e2d
|
iota/commands/extended/get_transaction_objects.py
|
iota/commands/extended/get_transaction_objects.py
|
# coding=utf-8
from __future__ import absolute_import, division, print_function, \
unicode_literals
from typing import Iterable, List, Optional
import filters as f
from iota import Transaction, TransactionHash
from iota.commands.core import GetTrytesCommand
from iota.commands import FilterCommand, RequestFilter
from iota.filters import Trytes
__all__ = [
'GetTransactionObjectsCommand',
]
class GetTransactionObjectsCommand(FilterCommand):
"""
Executes `GetTransactionObjectsCommand` command.
See :py:meth:`iota.api.StrictIota.get_transaction_objects`.
"""
command = 'getTransactionObjects'
def get_request_filter(self):
return GetTransactionObjectsRequestFilter()
def get_response_filter(self):
pass
def _execute(self, request):
hashes = request\
.get('hashes') # type: Optional[Iterable[TransactionHash]]
transactions = []
if hashes:
gt_response = GetTrytesCommand(adapter=self.adapter)(hashes=hashes)
transactions = list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return {
'transactions': transactions,
}
class GetTransactionObjectsRequestFilter(RequestFilter):
def __init__(self):
super(GetTransactionObjectsRequestFilter, self).__init__({
'hashes':
f.Required | f.Array | f.FilterRepeater(
f.Required |
Trytes(TransactionHash) |
f.Unicode(encoding='ascii', normalize=False),
),
})
|
# coding=utf-8
from __future__ import absolute_import, division, print_function, \
unicode_literals
from typing import Iterable, List, Optional
import filters as f
from iota import Transaction, TransactionHash
from iota.commands.core import GetTrytesCommand
from iota.commands import FilterCommand, RequestFilter
from iota.filters import StringifiedTrytesArray
__all__ = [
'GetTransactionObjectsCommand',
]
class GetTransactionObjectsCommand(FilterCommand):
"""
Executes `GetTransactionObjectsCommand` command.
See :py:meth:`iota.api.Iota.get_transaction_objects`.
"""
command = 'getTransactionObjects'
def get_request_filter(self):
return GetTransactionObjectsRequestFilter()
def get_response_filter(self):
pass
def _execute(self, request):
hashes = request\
.get('hashes') # type: Iterable[TransactionHash]
transactions = []
if hashes:
gt_response = GetTrytesCommand(adapter=self.adapter)(hashes=hashes)
transactions = list(map(
Transaction.from_tryte_string,
gt_response.get('trytes') or [],
)) # type: List[Transaction]
return {
'transactions': transactions,
}
class GetTransactionObjectsRequestFilter(RequestFilter):
def __init__(self):
super(GetTransactionObjectsRequestFilter, self).__init__({
'hashes':
StringifiedTrytesArray(TransactionHash) | f.Required
})
|
Use filter macro for request validation
|
Use filter macro for request validation
StringifiedTrytesArray(Type) filter macro was
introduced in #243. Becasue of this, no request
filter test case is needed, hence the macro is
covered already in other test cases.
|
Python
|
mit
|
iotaledger/iota.lib.py
|
---
+++
@@ -9,7 +9,7 @@
from iota import Transaction, TransactionHash
from iota.commands.core import GetTrytesCommand
from iota.commands import FilterCommand, RequestFilter
-from iota.filters import Trytes
+from iota.filters import StringifiedTrytesArray
__all__ = [
'GetTransactionObjectsCommand',
@@ -20,7 +20,7 @@
"""
Executes `GetTransactionObjectsCommand` command.
- See :py:meth:`iota.api.StrictIota.get_transaction_objects`.
+ See :py:meth:`iota.api.Iota.get_transaction_objects`.
"""
command = 'getTransactionObjects'
@@ -32,7 +32,7 @@
def _execute(self, request):
hashes = request\
- .get('hashes') # type: Optional[Iterable[TransactionHash]]
+ .get('hashes') # type: Iterable[TransactionHash]
transactions = []
if hashes:
@@ -51,9 +51,5 @@
def __init__(self):
super(GetTransactionObjectsRequestFilter, self).__init__({
'hashes':
- f.Required | f.Array | f.FilterRepeater(
- f.Required |
- Trytes(TransactionHash) |
- f.Unicode(encoding='ascii', normalize=False),
- ),
+ StringifiedTrytesArray(TransactionHash) | f.Required
})
|
0a0ebdde63628d9504ac9834ef20b996ab595d7d
|
stock_quant_package_product_packaging/models/stock_move_line.py
|
stock_quant_package_product_packaging/models/stock_move_line.py
|
# Copyright 2019 Camptocamp SA
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl)
from odoo import models
class StockMoveLine(models.Model):
_inherit = "stock.move.line"
def _action_done(self):
res = super()._action_done()
for line in self.filtered(lambda l: l.result_package_id):
line.result_package_id.auto_assign_packaging()
return res
|
# Copyright 2019 Camptocamp SA
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl)
from odoo import models
class StockMoveLine(models.Model):
_inherit = "stock.move.line"
def _action_done(self):
res = super()._action_done()
# _action_done in stock module sometimes delete a move line, we
# have to check if it still exists before reading/writing on it
for line in self.exists().filtered(lambda l: l.result_package_id):
line.result_package_id.auto_assign_packaging()
return res
|
Fix "record does not exist" when validating move line
|
Fix "record does not exist" when validating move line
When _action_done is called on a transfer, it may delete a part of
the move lines. The extension of `_action_done()` that assigns
a packaging fails with "Record does not exist or has been deleted".
Check if the if lines still exist before writing on them.
|
Python
|
agpl-3.0
|
BT-ojossen/stock-logistics-workflow,OCA/stock-logistics-workflow,BT-ojossen/stock-logistics-workflow,OCA/stock-logistics-workflow
|
---
+++
@@ -8,6 +8,8 @@
def _action_done(self):
res = super()._action_done()
- for line in self.filtered(lambda l: l.result_package_id):
+ # _action_done in stock module sometimes delete a move line, we
+ # have to check if it still exists before reading/writing on it
+ for line in self.exists().filtered(lambda l: l.result_package_id):
line.result_package_id.auto_assign_packaging()
return res
|
92d0a09cfb232270d04f82eccc451ee63bd7901a
|
dev/TOPSECRET/SirBot/lib/sirbot/shutdown.py
|
dev/TOPSECRET/SirBot/lib/sirbot/shutdown.py
|
# -*- coding: utf-8 -*-
#script containing all pertinent tasks to prepare for software termination.
#successful completion of this process at last runtime, will skip extra validation
#steps on next run
from json import dumps
def shutdown(config,interinput=None,interoutput=None):
#check for lingering runtime errors
#finishing writing log queues to file
#if none: write clean.start file in config directory
if(config['Interface']['remember position'] == 0):
config['Interface']['map'] = '620x540+50+50'
configPath = config['Path']+'\\config\\sirbot\\config'
configFile = open(configPath,"wb+")
configData = dumps(config)
configFile.write(configData)
configFile.close()
#perhaps add garbage collector control here?
|
# -*- coding: utf-8 -*-
#script containing all pertinent tasks to prepare for software termination.
#successful completion of this process at last runtime, will skip extra validation
#steps on next run
def shutdown(config,interinput,interoutput):
#check for lingering runtime errors
#finishing writing log queues to file
#if none: write clean.start file in config directory
pass
#perhaps add garbage collector control here?
|
Revert "changes to config during runtime are now saved"
|
Revert "changes to config during runtime are now saved"
This reverts commit e09a780da17bb2f97d20aafc2c007fe3fc3051bb.
|
Python
|
mit
|
SirRujak/SirBot
|
---
+++
@@ -4,19 +4,10 @@
#successful completion of this process at last runtime, will skip extra validation
#steps on next run
-from json import dumps
-
-def shutdown(config,interinput=None,interoutput=None):
+def shutdown(config,interinput,interoutput):
#check for lingering runtime errors
#finishing writing log queues to file
#if none: write clean.start file in config directory
- if(config['Interface']['remember position'] == 0):
- config['Interface']['map'] = '620x540+50+50'
-
- configPath = config['Path']+'\\config\\sirbot\\config'
- configFile = open(configPath,"wb+")
- configData = dumps(config)
- configFile.write(configData)
- configFile.close()
+ pass
#perhaps add garbage collector control here?
|
84af16cb5c6938fad4a10d248ec6e1ff20ec2306
|
neupy/__init__.py
|
neupy/__init__.py
|
"""
NeuPy is the Artificial Neural Network library implemented in Python.
"""
__version__ = '0.2.2'
|
"""
NeuPy is the Artificial Neural Network library implemented in Python.
"""
__version__ = '0.3.0dev1'
|
Set up version 0.3.0 dev 1.
|
Set up version 0.3.0 dev 1.
|
Python
|
mit
|
itdxer/neupy,itdxer/neupy,itdxer/neupy,itdxer/neupy
|
---
+++
@@ -3,4 +3,4 @@
"""
-__version__ = '0.2.2'
+__version__ = '0.3.0dev1'
|
411813bafe4b2af57aa7695f035e3e02b20ae85e
|
src/syntax/infix_coordination.py
|
src/syntax/infix_coordination.py
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
self.result_string = ""
# Break the tree
def break_tree(self, tree):
t = Tree.fromstring(str(tree))
self.has_infix_coordination = False
self.result_string = ""
self.parse_tree(t)
print "Infix Coordination: " + str(self.has_infix_coordination)
return self.result_string
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
self.has_infix_coordination |= tree.label() == "CC"
for node in tree:
self.parse_tree(node)
|
__author__ = 's7a'
# All imports
from nltk.tree import Tree
# The infix coordination class
class InfixCoordination:
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
# Break the tree
def break_tree(self, tree):
self.has_infix_coordination = False
self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
# Parse the tree
def parse_tree(self, tree):
if type(tree) == Tree:
sentence_root = tree[0]
if type(sentence_root) == Tree:
if sentence_root.label() == "S":
print "Valid Tree"
for node in sentence_root:
if type(node) == Tree:
if node.label() == "CC":
self.has_infix_coordination |= True
|
Change the rules for infix coordinario
|
Change the rules for infix coordinario
|
Python
|
mit
|
Somsubhra/Simplify,Somsubhra/Simplify,Somsubhra/Simplify
|
---
+++
@@ -10,27 +10,21 @@
# Constructor for the infix coordination
def __init__(self):
self.has_infix_coordination = False
- self.result_string = ""
# Break the tree
def break_tree(self, tree):
- t = Tree.fromstring(str(tree))
-
self.has_infix_coordination = False
- self.result_string = ""
-
- self.parse_tree(t)
-
+ self.parse_tree(tree)
print "Infix Coordination: " + str(self.has_infix_coordination)
-
- return self.result_string
# Parse the tree
def parse_tree(self, tree):
-
if type(tree) == Tree:
-
- self.has_infix_coordination |= tree.label() == "CC"
-
- for node in tree:
- self.parse_tree(node)
+ sentence_root = tree[0]
+ if type(sentence_root) == Tree:
+ if sentence_root.label() == "S":
+ print "Valid Tree"
+ for node in sentence_root:
+ if type(node) == Tree:
+ if node.label() == "CC":
+ self.has_infix_coordination |= True
|
b154eef7d36359d0fbcc11a165371d3a54f40682
|
libtbx_refresh.py
|
libtbx_refresh.py
|
from __future__ import division
def run():
from dials.util.config import CompletionGenerator
gen = CompletionGenerator()
gen.generate()
try:
run()
except Exception, e:
pass
|
from __future__ import division
def run():
from dials.util.config import CompletionGenerator
gen = CompletionGenerator()
gen.generate()
try:
run()
except Exception, e:
pass
try:
from glob import glob
import os
filenames = glob("extensions/*.pyc")
if len(filenames) > 0:
print "Cleaning up 'dials/extensions':"
for filename in glob("extensions/*.pyc"):
print " Deleting %s" % filename
os.remove(filename)
except Exception, e:
pass
|
Delete *.pyc files from dials/extensions when doing libtbx.refresh
|
Delete *.pyc files from dials/extensions when doing libtbx.refresh
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
---
+++
@@ -9,3 +9,15 @@
run()
except Exception, e:
pass
+
+try:
+ from glob import glob
+ import os
+ filenames = glob("extensions/*.pyc")
+ if len(filenames) > 0:
+ print "Cleaning up 'dials/extensions':"
+ for filename in glob("extensions/*.pyc"):
+ print " Deleting %s" % filename
+ os.remove(filename)
+except Exception, e:
+ pass
|
89454b1e83e01a4d523b776f74429a81467762da
|
redis/utils.py
|
redis/utils.py
|
try:
import hiredis
HIREDIS_AVAILABLE = True
except ImportError:
HIREDIS_AVAILABLE = False
def from_url(url, db=None, **kwargs):
"""
Returns an active Redis client generated from the given database URL.
Will attempt to extract the database id from the path url fragment, if
none is provided.
"""
from redis.client import Redis
return Redis.from_url(url, db, **kwargs)
from contextlib import contextmanager
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
yield p
p.execute()
|
from contextlib import contextmanager
try:
import hiredis
HIREDIS_AVAILABLE = True
except ImportError:
HIREDIS_AVAILABLE = False
def from_url(url, db=None, **kwargs):
"""
Returns an active Redis client generated from the given database URL.
Will attempt to extract the database id from the path url fragment, if
none is provided.
"""
from redis.client import Redis
return Redis.from_url(url, db, **kwargs)
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
yield p
p.execute()
|
Move import statement on top for PEP8 compliancy.
|
Move import statement on top for PEP8 compliancy.
|
Python
|
mit
|
MegaByte875/redis-py,fengshao0907/redis-py,sigma-random/redis-py,sunminghong/redis-py,garnertb/redis-py,softliumin/redis-py,sirk390/redis-py,barseghyanartur/redis-py,zhangyancoder/redis-py,LTD-Beget/redis-py,boyxuper/redis-py,barseghyanartur/redis-py,dmugtasimov/redis-py,LTD-Beget/redis-py,yuruidong/redis-py,sigma-random/redis-py,VishvajitP/redis-py,boyxuper/redis-py,barseghyanartur/redis-py,thedrow/redis-py,siryuan525614/python_operation,ContextLogic/redis-py,andymccurdy/redis-py,Kazanz/redis-py,mozillazg/redis-py-doc,joshowen/redis-py,joshowen/redis-py,MrKiven/redis-py,rcrdclub/redis-py,ze-phyr-us/redis-py,RedisLabs/redis-py,maxikov/redis-py,dylanjw/redis-py,wfxiang08/redis-py,dmoliveira/redis-py,fengshao0907/redis-py,kaushik94/redis-py,sirk390/redis-py,siryuan525614/python_operation,RedisLabs/redis-py,JamieCressey/redispy,piperck/redis-py,kaushik94/redis-py,Kazanz/redis-py,5977862/redis-py,kouhou/redis-py,dmugtasimov/redis-py,wfxiang08/redis-py,fengsp/redis-py,ffrree/redis-py,harlowja/redis-py,mozillazg/redis-py-doc,VishvajitP/redis-py,sunminghong/redis-py,yuruidong/redis-py,alisaifee/redis-py,ferrero-zhang/redis-py,thedrow/redis-py,JamieCressey/redispy,MrKiven/redis-py,harlowja/redis-py,cvrebert/redis-py,lamby/redis-py,jparise/redis-py,fengsp/redis-py,piperck/redis-py,andymccurdy/redis-py,kouhou/redis-py,boyxuper/redis-py,ycaihua/redis-py,forblackking/redis-py,MegaByte875/redis-py,alisaifee/redis-py,yihuang/redis-py,ContextLogic/redis-py,5977862/redis-py,LTD-Beget/redis-py,softliumin/redis-py,ffrree/redis-py,kouhou/redis-py,redis/redis-py,joshowen/redis-py,nfvs/redis-py,ycaihua/redis-py,softliumin/redis-py,5977862/redis-py,andymccurdy/redis-py,pombredanne/redis-py,cvrebert/redis-py,JamieCressey/redispy,dmugtasimov/redis-py,maxikov/redis-py,nfvs/redis-py,fengshao0907/redis-py,ycaihua/redis-py,yuruidong/redis-py,cvrebert/redis-py,MegaByte875/redis-py,jparise/redis-py,ze-phyr-us/redis-py,sigma-random/redis-py,harlowja/redis-py,ContextLogic/redis-py,VishvajitP/redis
-py,yihuang/redis-py,ze-phyr-us/redis-py,siryuan525614/python_operation,pombredanne/redis-py,nfvs/redis-py,fengsp/redis-py,Kazanz/redis-py,rcrdclub/redis-py,pombredanne/redis-py,MrKiven/redis-py,thedrow/redis-py,kaushik94/redis-py,ffrree/redis-py,sunminghong/redis-py,ferrero-zhang/redis-py,garnertb/redis-py,sirk390/redis-py,rcrdclub/redis-py,yihuang/redis-py,forblackking/redis-py,zhangyancoder/redis-py,dmoliveira/redis-py,redis/redis-py,garnertb/redis-py,zhangyancoder/redis-py,forblackking/redis-py,piperck/redis-py,dylanjw/redis-py,lamby/redis-py,ferrero-zhang/redis-py,jparise/redis-py,dylanjw/redis-py,dmoliveira/redis-py,wfxiang08/redis-py,maxikov/redis-py,lamby/redis-py
|
---
+++
@@ -1,3 +1,6 @@
+from contextlib import contextmanager
+
+
try:
import hiredis
HIREDIS_AVAILABLE = True
@@ -16,7 +19,6 @@
return Redis.from_url(url, db, **kwargs)
-from contextlib import contextmanager
@contextmanager
def pipeline(redis_obj):
p = redis_obj.pipeline()
|
a42ffdcd34876bcd1df81ce00dbfd6426580bd82
|
gaphor/UML/classes/copypaste.py
|
gaphor/UML/classes/copypaste.py
|
import itertools
from gaphor.diagram.copypaste import copy, copy_named_element
from gaphor.UML import Association, Class, Interface, Operation
@copy.register(Class)
@copy.register(Interface)
def copy_class(element):
yield element.id, copy_named_element(element)
for feature in itertools.chain(
element.ownedAttribute,
element.ownedOperation,
):
yield from copy(feature)
@copy.register
def copy_operation(element: Operation):
yield element.id, copy_named_element(element)
for feature in element.ownedParameter:
yield from copy(feature)
@copy.register
def copy_association(element: Association):
yield element.id, copy_named_element(element)
for end in element.memberEnd:
yield from copy(end)
|
import itertools
from gaphor.diagram.copypaste import copy, copy_named_element
from gaphor.UML import Association, Class, Enumeration, Interface, Operation
@copy.register(Class)
@copy.register(Interface)
def copy_class(element):
yield element.id, copy_named_element(element)
for feature in itertools.chain(
element.ownedAttribute,
element.ownedOperation,
):
yield from copy(feature)
@copy.register
def copy_enumeration(element: Enumeration):
yield element.id, copy_named_element(element)
for literal in element.ownedLiteral:
yield from copy(literal)
@copy.register
def copy_operation(element: Operation):
yield element.id, copy_named_element(element)
for feature in element.ownedParameter:
yield from copy(feature)
@copy.register
def copy_association(element: Association):
yield element.id, copy_named_element(element)
for end in element.memberEnd:
yield from copy(end)
|
Copy enumeration literals when pasting an Enumeration
|
Copy enumeration literals when pasting an Enumeration
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
---
+++
@@ -1,7 +1,7 @@
import itertools
from gaphor.diagram.copypaste import copy, copy_named_element
-from gaphor.UML import Association, Class, Interface, Operation
+from gaphor.UML import Association, Class, Enumeration, Interface, Operation
@copy.register(Class)
@@ -16,6 +16,13 @@
@copy.register
+def copy_enumeration(element: Enumeration):
+ yield element.id, copy_named_element(element)
+ for literal in element.ownedLiteral:
+ yield from copy(literal)
+
+
+@copy.register
def copy_operation(element: Operation):
yield element.id, copy_named_element(element)
for feature in element.ownedParameter:
|
e72156f50b0bab241b90b0f0c53414529740acd6
|
ds_binary_heap.py
|
ds_binary_heap.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class BinaryHeap(object):
def __init__(self):
# Put single zero as the 1st element, so that
# integer division can be used in later methods.
self.heap_ls = [0]
self.current_size = 0
def _percolate_up(self, i):
while i // 2 > 0:
if self.heap_ls[i] < self.heap_ls[i // 2]:
tmp = self.heap_ls[i // 2]
self.heap_ls[i // 2] = self.heap_ls[i]
self.heap_ls[i] = tmp
i = i // 2
def insert(self, new_node):
self.heap_ls.append(new_node)
self.current_size += 1
self._percolate_up(self.current_size)
def main():
pass
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
class BinaryHeap(object):
def __init__(self):
# Put single zero as the 1st element, so that
# integer division can be used in later methods.
self.heap_ls = [0]
self.current_size = 0
def _percolate_up(self, i):
while i // 2 > 0:
if self.heap_ls[i] < self.heap_ls[i // 2]:
tmp = self.heap_ls[i // 2]
self.heap_ls[i // 2] = self.heap_ls[i]
self.heap_ls[i] = tmp
i = i // 2
def insert(self, new_node):
self.heap_ls.append(new_node)
self.current_size += 1
self._percolate_up(self.current_size)
def _percolate_down(self, i):
pass
def _get_min_child(self, i):
pass
def delete_min(self):
pass
def main():
pass
if __name__ == '__main__':
main()
|
Add delete_min() and its helper func’s
|
Add delete_min() and its helper func’s
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
---
+++
@@ -22,6 +22,15 @@
self.current_size += 1
self._percolate_up(self.current_size)
+ def _percolate_down(self, i):
+ pass
+
+ def _get_min_child(self, i):
+ pass
+
+ def delete_min(self):
+ pass
+
def main():
pass
|
59536a70ef39e34a5aea57131492a475e05cd227
|
lg_cms_director/setup.py
|
lg_cms_director/setup.py
|
#!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.packages import find_packages
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
packages=['trollius', 'pulsar'],
package_dir={'': 'src'},
scripts=[],
requires=[]
)
setup(**d)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
#!/usr/bin/env python
from distutils.core import setup
from catkin_pkg.packages import find_packages
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
packages=find_packages('src'),
package_dir={'': 'src'},
scripts=[],
requires=[]
)
setup(**d)
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
|
Revert to previous packaging of director's dependencies thx to @mvollrath
|
Revert to previous packaging of director's dependencies thx to @mvollrath
|
Python
|
apache-2.0
|
EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes
|
---
+++
@@ -5,7 +5,7 @@
from catkin_pkg.python_setup import generate_distutils_setup
d = generate_distutils_setup(
- packages=['trollius', 'pulsar'],
+ packages=find_packages('src'),
package_dir={'': 'src'},
scripts=[],
requires=[]
|
cc4a17db1e4ba81019ed312cbe324874430b9814
|
billybot/tests/test_billybot.py
|
billybot/tests/test_billybot.py
|
import time
import datetime
import unittest
from unittest.mock import patch, call
from billybot.billybot import MessageTriage
class TestMessageTriage(unittest.TestCase):
def setUp(self):
self.thread1 = MessageTriage('USERID1', 'user1', 'Warren', 'testchanl')
self.thread1.daemon = True
self.thread2 = MessageTriage('USERID2', 'user2', 'Markey', 'testchanl')
self.thread2.daemon = True
self.thread3 = MessageTriage('USERID3', 'user3', 'Capuano', 'testchanl')
self.thread3.daemon = True
def test_time_alive(self):
time.sleep(3)
time_alive = self.thread1.time_alive
# Checking that time alive is around 3 but it won't be 3
# exactly, so we check that it's between 2 and 4
self.assertTrue(time_alive > 2)
self.assertTrue(time_alive < 4)
@patch('billybot.billybot.MessageTriage.run')
def test_run(self, mock_run):
print(self.thread1.start())
self.assertTrue(1 == 2)
|
import time
import datetime
import unittest
from unittest.mock import patch, call
from billybot.billybot import MessageTriage
class TestMessageTriage(unittest.TestCase):
def setUp(self):
self.thread1 = MessageTriage('USERID1', 'user1', 'Warren', 'testchanl')
self.thread1.daemon = True
self.thread2 = MessageTriage('USERID2', 'user2', 'Markey', 'testchanl')
self.thread2.daemon = True
self.thread3 = MessageTriage('USERID3', 'user3', 'Capuano', 'testchanl')
self.thread3.daemon = True
def test_time_alive(self):
time.sleep(3)
time_alive = self.thread1.time_alive
# Checking that time alive is around 3 but it won't be 3
# exactly, so we check that it's between 2 and 4
self.assertTrue(time_alive > 2)
self.assertTrue(time_alive < 4)
@patch('billybot.billybot.MessageTriage.run')
def test_run(self, mock_run):
mock_run.time_delay = lambda delay: time.sleep(delay)
mock_run.time_delay(5)
self.thread1.start()
self.assertTrue(1 == 2)
|
Use a lambda to add a time delay function onto the mocked run method
|
Use a lambda to add a time delay function onto the mocked run method
|
Python
|
mit
|
mosegontar/billybot
|
---
+++
@@ -29,5 +29,7 @@
@patch('billybot.billybot.MessageTriage.run')
def test_run(self, mock_run):
- print(self.thread1.start())
+ mock_run.time_delay = lambda delay: time.sleep(delay)
+ mock_run.time_delay(5)
+ self.thread1.start()
self.assertTrue(1 == 2)
|
4949b1051656566ce544a8240b0328a61259868a
|
migrations/versions/139_add_ns_index_to_contact_and_event.py
|
migrations/versions/139_add_ns_index_to_contact_and_event.py
|
"""Add compound index to Contact and Event
Revision ID: 1fd7b3e0b662
Revises: 5305d4ae30b4
Create Date: 2015-02-17 18:11:30.726188
"""
# revision identifiers, used by Alembic.
revision = '1fd7b3e0b662'
down_revision = '2d8a350b4885'
from alembic import op
def upgrade():
op.create_index(
'ix_contact_ns_uid_provider_name',
'contact',
['namespace_id', 'uid', 'provider_name'], unique=False)
op.create_index(
'ix_event_ns_uid_provider_name',
'event',
['namespace_id', 'uid', 'provider_name'], unique=False)
def downgrade():
raise Exception("Don't bother.")
|
"""Add compound index to Contact and Event
Revision ID: 1fd7b3e0b662
Revises: 5305d4ae30b4
Create Date: 2015-02-17 18:11:30.726188
"""
# revision identifiers, used by Alembic.
revision = '1fd7b3e0b662'
down_revision = '5305d4ae30b4'
from alembic import op
def upgrade():
op.create_index(
'ix_contact_ns_uid_provider_name',
'contact',
['namespace_id', 'uid', 'provider_name'], unique=False)
op.create_index(
'ix_event_ns_uid_provider_name',
'event',
['namespace_id', 'uid', 'provider_name'], unique=False)
def downgrade():
raise Exception("Don't bother.")
|
Fix migration history bug introduced with merge
|
Fix migration history bug introduced with merge
|
Python
|
agpl-3.0
|
wakermahmud/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,nylas/sync-engine,jobscore/sync-engine,nylas/sync-engine,jobscore/sync-engine,closeio/nylas,gale320/sync-engine,wakermahmud/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,nylas/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,jobscore/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,closeio/nylas,gale320/sync-engine,PriviPK/privipk-sync-engine,gale320/sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,EthanBlackburn/sync-engine,ErinCall/sync-engine,gale320/sync-engine
|
---
+++
@@ -8,7 +8,7 @@
# revision identifiers, used by Alembic.
revision = '1fd7b3e0b662'
-down_revision = '2d8a350b4885'
+down_revision = '5305d4ae30b4'
from alembic import op
|
de89049649fe720d45b271f519674845104f1941
|
flow_workflow/petri_net/future_nets/base.py
|
flow_workflow/petri_net/future_nets/base.py
|
from flow.petri_net.future_net import FutureNet
from flow.petri_net.success_failure_net import SuccessFailureNet
class SimplifiedSuccessFailureNet(FutureNet):
def __init__(self, name=''):
FutureNet.__init__(self, name=name)
# Internal -- subclasses should connect to these
self.internal_start_transition = self.add_basic_transition('internal-start')
self.internal_failure_place = self.add_place('internal-failure')
self.internal_success_place = self.add_place('internal-success')
# Transitions to observe -- owners and subclasses may observe these
self.start_transition = self.add_basic_transition(name='start')
self.bridge_transitions(self.start_transition, self.internal_start_transition)
self.failure_transition = self.add_basic_transition(name='failure')
self.failure_transition.add_arc_in(self.internal_failure_place)
self.success_transition = self.add_basic_transition(name='success')
self.failure_transition.add_arc_in(self.internal_success_place)
class GenomeNetBase(SimplifiedSuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
SimplifiedSuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
|
from flow.petri_net.future_net import FutureNet
from flow.petri_net.success_failure_net import SuccessFailureNet
class GenomeNetBase(SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
|
Make GenomeNetBase a SuccessFailureNet again
|
Make GenomeNetBase a SuccessFailureNet again
|
Python
|
agpl-3.0
|
genome/flow-workflow,genome/flow-workflow,genome/flow-workflow
|
---
+++
@@ -2,29 +2,8 @@
from flow.petri_net.success_failure_net import SuccessFailureNet
-class SimplifiedSuccessFailureNet(FutureNet):
- def __init__(self, name=''):
- FutureNet.__init__(self, name=name)
-
- # Internal -- subclasses should connect to these
- self.internal_start_transition = self.add_basic_transition('internal-start')
-
- self.internal_failure_place = self.add_place('internal-failure')
- self.internal_success_place = self.add_place('internal-success')
-
- # Transitions to observe -- owners and subclasses may observe these
- self.start_transition = self.add_basic_transition(name='start')
- self.bridge_transitions(self.start_transition, self.internal_start_transition)
-
- self.failure_transition = self.add_basic_transition(name='failure')
- self.failure_transition.add_arc_in(self.internal_failure_place)
-
- self.success_transition = self.add_basic_transition(name='success')
- self.failure_transition.add_arc_in(self.internal_success_place)
-
-
-class GenomeNetBase(SimplifiedSuccessFailureNet):
+class GenomeNetBase(SuccessFailureNet):
def __init__(self, name, operation_id, parent_operation_id=None):
- SimplifiedSuccessFailureNet.__init__(self, name=name)
+ SuccessFailureNet.__init__(self, name=name)
self.operation_id = operation_id
self.parent_operation_id = parent_operation_id
|
3486d3cb7122ba59c64d9af5b6eb6b12bc97e193
|
brew/utilities/efficiency.py
|
brew/utilities/efficiency.py
|
# -*- coding: utf-8 -*-
from .sugar import sg_to_gu
__all__ = [
u'calculate_brew_house_yield',
]
def calculate_brew_house_yield(wort_volume, sg, grain_additions):
"""
Calculate Brew House Yield
:param float wort_volume: The volume of the wort
:param float sg: THe specific gravity of the wort
:param list grain_additions: A list of grain additions in the wort
:type grain_additions: list of GrainAddition objects
:return: The brew house yield as a percentage
Brew House Yield is a function of the wort volume, the measured specific
gravity, and the grain additions used to make the wort. This equation is
thrown off by use of LME or DME since both have 100% efficiency in a brew.
A better measure is to look at just the grains that needed to be steeped
seperately and measure the specific gravity of that process.
"""
gravity_units = sum([grain_add.gu for grain_add in grain_additions])
return sg_to_gu(sg) * wort_volume / gravity_units
|
# -*- coding: utf-8 -*-
from ..constants import GRAIN_TYPE_DME
from ..constants import GRAIN_TYPE_LME
from .sugar import sg_to_gu
__all__ = [
u'calculate_brew_house_yield',
]
def calculate_brew_house_yield(wort_volume, sg, grain_additions):
"""
Calculate Brew House Yield
:param float wort_volume: The volume of the wort
:param float sg: THe specific gravity of the wort
:param list grain_additions: A list of grain additions in the wort
:type grain_additions: list of GrainAddition objects
:return: The brew house yield as a percentage
Brew House Yield is a function of the wort volume, the measured specific
gravity, and the grain additions used to make the wort. This equation is
thrown off by use of LME or DME since both have 100% efficiency in a brew.
A better measure is to look at just the grains that needed to be steeped
seperately and measure the specific gravity of that process.
"""
grain_adds_lme_dme = filter(lambda grain_add: grain_add.grain_type
in [GRAIN_TYPE_DME, GRAIN_TYPE_LME],
grain_additions)
grain_adds_other = filter(lambda grain_add: grain_add.grain_type
not in [GRAIN_TYPE_DME, GRAIN_TYPE_LME],
grain_additions)
gu_lme_dme = sum([grain_add.gu for grain_add in grain_adds_lme_dme])
gu_other = sum([grain_add.gu for grain_add in grain_adds_other])
return (sg_to_gu(sg) * wort_volume - gu_lme_dme) / gu_other
|
Clean up brew house yield calculator to deal with DME and LME additions
|
Clean up brew house yield calculator to deal with DME and LME additions
|
Python
|
mit
|
chrisgilmerproj/brewday,chrisgilmerproj/brewday
|
---
+++
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from ..constants import GRAIN_TYPE_DME
+from ..constants import GRAIN_TYPE_LME
from .sugar import sg_to_gu
__all__ = [
@@ -23,6 +25,13 @@
A better measure is to look at just the grains that needed to be steeped
seperately and measure the specific gravity of that process.
"""
+ grain_adds_lme_dme = filter(lambda grain_add: grain_add.grain_type
+ in [GRAIN_TYPE_DME, GRAIN_TYPE_LME],
+ grain_additions)
+ grain_adds_other = filter(lambda grain_add: grain_add.grain_type
+ not in [GRAIN_TYPE_DME, GRAIN_TYPE_LME],
+ grain_additions)
- gravity_units = sum([grain_add.gu for grain_add in grain_additions])
- return sg_to_gu(sg) * wort_volume / gravity_units
+ gu_lme_dme = sum([grain_add.gu for grain_add in grain_adds_lme_dme])
+ gu_other = sum([grain_add.gu for grain_add in grain_adds_other])
+ return (sg_to_gu(sg) * wort_volume - gu_lme_dme) / gu_other
|
e65a8c057d9dbd156222542a0e544d294292de00
|
thinglang/lexer/symbols/base.py
|
thinglang/lexer/symbols/base.py
|
from thinglang.utils.type_descriptors import ValueType
from thinglang.lexer.symbols import LexicalSymbol
class LexicalQuote(LexicalSymbol): # "
EMITTABLE = False
@classmethod
def next_operator_set(cls, current, original):
if current is original:
return {'"': LexicalQuote}
return original
class LexicalParenthesesOpen(LexicalSymbol):
pass # (
class LexicalParenthesesClose(LexicalSymbol):
pass # )
class LexicalBracketOpen(LexicalSymbol):
pass # [
class LexicalBracketClose(LexicalSymbol):
pass # ]
class LexicalSeparator(LexicalSymbol):
pass # ,
class LexicalIndent(LexicalSymbol):
pass # <TAB>
class LexicalAccess(LexicalSymbol):
pass # a.b
class LexicalInlineComment(LexicalSymbol): pass
class LexicalAssignment(LexicalSymbol): pass
class LexicalIdentifier(LexicalSymbol, ValueType):
def __init__(self, value):
super(LexicalIdentifier, self).__init__(value)
self.value = value
def describe(self):
return self.value
def evaluate(self, stack):
return stack[self.value]
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return type(other) == type(self) and self.value == other.value
LexicalIdentifier.SELF = LexicalIdentifier("self")
|
from thinglang.utils.type_descriptors import ValueType
from thinglang.lexer.symbols import LexicalSymbol
class LexicalQuote(LexicalSymbol): # "
EMITTABLE = False
@classmethod
def next_operator_set(cls, current, original):
if current is original:
return {'"': LexicalQuote}
return original
class LexicalParenthesesOpen(LexicalSymbol):
pass # (
class LexicalParenthesesClose(LexicalSymbol):
pass # )
class LexicalBracketOpen(LexicalSymbol):
pass # [
class LexicalBracketClose(LexicalSymbol):
pass # ]
class LexicalSeparator(LexicalSymbol):
pass # ,
class LexicalIndent(LexicalSymbol):
pass # <TAB>
class LexicalAccess(LexicalSymbol):
pass # a.b
class LexicalInlineComment(LexicalSymbol): pass
class LexicalAssignment(LexicalSymbol): pass
class LexicalIdentifier(LexicalSymbol, ValueType):
def __init__(self, value):
super(LexicalIdentifier, self).__init__(value)
self.value = value
def describe(self):
return self.value
def evaluate(self, resolver):
return resolver.resolve(self)
def __hash__(self):
return hash(self.value)
def __eq__(self, other):
return type(other) == type(self) and self.value == other.value
LexicalIdentifier.SELF = LexicalIdentifier("self")
|
Use new resolver in LexicalID resolution
|
Use new resolver in LexicalID resolution
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
---
+++
@@ -47,6 +47,7 @@
class LexicalIdentifier(LexicalSymbol, ValueType):
+
def __init__(self, value):
super(LexicalIdentifier, self).__init__(value)
self.value = value
@@ -54,8 +55,8 @@
def describe(self):
return self.value
- def evaluate(self, stack):
- return stack[self.value]
+ def evaluate(self, resolver):
+ return resolver.resolve(self)
def __hash__(self):
return hash(self.value)
|
e559c897cec534c58ab7940e2623a1decfb4958a
|
numpy/distutils/command/install_clib.py
|
numpy/distutils/command/install_clib.py
|
import os
from distutils.core import Command
from numpy.distutils.misc_util import get_cmd
class install_clib(Command):
description = "Command to install installable C libraries"
user_options = []
def initialize_options(self):
self.install_dir = None
self.outfiles = []
def finalize_options(self):
self.set_undefined_options('install', ('install_lib', 'install_dir'))
def run (self):
# We need the compiler to get the library name -> filename association
from distutils.ccompiler import new_compiler
compiler = new_compiler(compiler=None)
compiler.customize(self.distribution)
build_dir = get_cmd("build_clib").build_clib
for l in self.distribution.installed_libraries:
target_dir = os.path.join(self.install_dir, l.target_dir)
name = compiler.library_filename(l.name)
source = os.path.join(build_dir, name)
self.mkpath(target_dir)
self.outfiles.append(self.copy_file(source, target_dir)[0])
def get_outputs(self):
return self.outfiles
|
import os
from distutils.core import Command
from distutils.ccompiler import new_compiler
from numpy.distutils.misc_util import get_cmd
class install_clib(Command):
description = "Command to install installable C libraries"
user_options = []
def initialize_options(self):
self.install_dir = None
self.outfiles = []
def finalize_options(self):
self.set_undefined_options('install', ('install_lib', 'install_dir'))
def run (self):
# We need the compiler to get the library name -> filename association
compiler = new_compiler(compiler=None)
compiler.customize(self.distribution)
build_dir = get_cmd("build_clib").build_clib
for l in self.distribution.installed_libraries:
target_dir = os.path.join(self.install_dir, l.target_dir)
name = compiler.library_filename(l.name)
source = os.path.join(build_dir, name)
self.mkpath(target_dir)
self.outfiles.append(self.copy_file(source, target_dir)[0])
def get_outputs(self):
return self.outfiles
|
Move import at the top of module.
|
Move import at the top of module.
|
Python
|
bsd-3-clause
|
jorisvandenbossche/numpy,MichaelAquilina/numpy,b-carter/numpy,sonnyhu/numpy,GrimDerp/numpy,dch312/numpy,githubmlai/numpy,astrofrog/numpy,brandon-rhodes/numpy,joferkington/numpy,CMartelLML/numpy,rajathkumarmp/numpy,ahaldane/numpy,MaPePeR/numpy,empeeu/numpy,pelson/numpy,nguyentu1602/numpy,jakirkham/numpy,jakirkham/numpy,tdsmith/numpy,sonnyhu/numpy,stefanv/numpy,brandon-rhodes/numpy,ahaldane/numpy,sinhrks/numpy,ChristopherHogan/numpy,chiffa/numpy,SunghanKim/numpy,bertrand-l/numpy,MaPePeR/numpy,nguyentu1602/numpy,dwillmer/numpy,hainm/numpy,dato-code/numpy,tacaswell/numpy,numpy/numpy,SunghanKim/numpy,larsmans/numpy,bmorris3/numpy,solarjoe/numpy,NextThought/pypy-numpy,yiakwy/numpy,numpy/numpy,WarrenWeckesser/numpy,abalkin/numpy,rajathkumarmp/numpy,mingwpy/numpy,ChristopherHogan/numpy,pyparallel/numpy,stefanv/numpy,WillieMaddox/numpy,ChristopherHogan/numpy,sonnyhu/numpy,astrofrog/numpy,kirillzhuravlev/numpy,seberg/numpy,groutr/numpy,empeeu/numpy,MichaelAquilina/numpy,embray/numpy,has2k1/numpy,simongibbons/numpy,skwbc/numpy,trankmichael/numpy,WillieMaddox/numpy,mortada/numpy,seberg/numpy,KaelChen/numpy,cjermain/numpy,numpy/numpy-refactor,seberg/numpy,chatcannon/numpy,sigma-random/numpy,mingwpy/numpy,skwbc/numpy,astrofrog/numpy,skymanaditya1/numpy,Anwesh43/numpy,bringingheavendown/numpy,stuarteberg/numpy,andsor/numpy,Eric89GXL/numpy,moreati/numpy,jschueller/numpy,larsmans/numpy,skymanaditya1/numpy,BabeNovelty/numpy,jorisvandenbossche/numpy,sigma-random/numpy,githubmlai/numpy,sinhrks/numpy,Yusa95/numpy,empeeu/numpy,anntzer/numpy,dwf/numpy,ewmoore/numpy,groutr/numpy,kiwifb/numpy,dwf/numpy,argriffing/numpy,mortada/numpy,madphysicist/numpy,mindw/numpy,embray/numpy,shoyer/numpy,rgommers/numpy,BMJHayward/numpy,mhvk/numpy,numpy/numpy-refactor,ChanderG/numpy,tynn/numpy,Eric89GXL/numpy,pizzathief/numpy,WarrenWeckesser/numpy,has2k1/numpy,rhythmsosad/numpy,GaZ3ll3/numpy,stefanv/numpy,BabeNovelty/numpy,Linkid/numpy,jankoslavic/numpy,bringingheavendown/numpy,immerrr/numpy,grlee77/numpy,a
jdawson/numpy,skymanaditya1/numpy,WarrenWeckesser/numpy,pelson/numpy,felipebetancur/numpy,madphysicist/numpy,has2k1/numpy,ChanderG/numpy,rmcgibbo/numpy,njase/numpy,CMartelLML/numpy,ewmoore/numpy,trankmichael/numpy,Eric89GXL/numpy,dato-code/numpy,rmcgibbo/numpy,madphysicist/numpy,grlee77/numpy,dch312/numpy,maniteja123/numpy,mathdd/numpy,Srisai85/numpy,kirillzhuravlev/numpy,Anwesh43/numpy,mathdd/numpy,Dapid/numpy,rhythmsosad/numpy,pbrod/numpy,nbeaver/numpy,solarjoe/numpy,trankmichael/numpy,stuarteberg/numpy,kiwifb/numpy,ddasilva/numpy,ddasilva/numpy,rajathkumarmp/numpy,yiakwy/numpy,pdebuyl/numpy,jorisvandenbossche/numpy,jorisvandenbossche/numpy,rudimeier/numpy,tynn/numpy,pyparallel/numpy,KaelChen/numpy,MichaelAquilina/numpy,hainm/numpy,musically-ut/numpy,b-carter/numpy,githubmlai/numpy,joferkington/numpy,mwiebe/numpy,GaZ3ll3/numpy,KaelChen/numpy,cjermain/numpy,grlee77/numpy,nbeaver/numpy,utke1/numpy,MSeifert04/numpy,madphysicist/numpy,matthew-brett/numpy,ViralLeadership/numpy,dimasad/numpy,grlee77/numpy,AustereCuriosity/numpy,ContinuumIO/numpy,rgommers/numpy,ekalosak/numpy,ESSS/numpy,rhythmsosad/numpy,BMJHayward/numpy,ContinuumIO/numpy,nbeaver/numpy,pelson/numpy,bringingheavendown/numpy,mattip/numpy,bmorris3/numpy,behzadnouri/numpy,Anwesh43/numpy,MichaelAquilina/numpy,bmorris3/numpy,drasmuss/numpy,dato-code/numpy,tynn/numpy,cjermain/numpy,rgommers/numpy,ssanderson/numpy,rherault-insa/numpy,skymanaditya1/numpy,mortada/numpy,utke1/numpy,SiccarPoint/numpy,embray/numpy,Yusa95/numpy,NextThought/pypy-numpy,SiccarPoint/numpy,mhvk/numpy,ssanderson/numpy,ahaldane/numpy,yiakwy/numpy,naritta/numpy,Srisai85/numpy,rmcgibbo/numpy,SunghanKim/numpy,matthew-brett/numpy,musically-ut/numpy,madphysicist/numpy,MaPePeR/numpy,bertrand-l/numpy,numpy/numpy,leifdenby/numpy,rgommers/numpy,leifdenby/numpy,moreati/numpy,numpy/numpy-refactor,njase/numpy,tdsmith/numpy,rherault-insa/numpy,matthew-brett/numpy,gfyoung/numpy,BMJHayward/numpy,immerrr/numpy,behzadnouri/numpy,njase/numpy,sinhrks/numpy,Vir
alLeadership/numpy,WillieMaddox/numpy,sigma-random/numpy,jonathanunderwood/numpy,ssanderson/numpy,andsor/numpy,ChristopherHogan/numpy,cowlicks/numpy,BabeNovelty/numpy,andsor/numpy,dato-code/numpy,WarrenWeckesser/numpy,ajdawson/numpy,gmcastil/numpy,anntzer/numpy,musically-ut/numpy,AustereCuriosity/numpy,KaelChen/numpy,abalkin/numpy,charris/numpy,hainm/numpy,immerrr/numpy,ekalosak/numpy,jankoslavic/numpy,AustereCuriosity/numpy,jakirkham/numpy,sonnyhu/numpy,BabeNovelty/numpy,CMartelLML/numpy,ESSS/numpy,rudimeier/numpy,hainm/numpy,dwf/numpy,SiccarPoint/numpy,ewmoore/numpy,astrofrog/numpy,dimasad/numpy,brandon-rhodes/numpy,mindw/numpy,shoyer/numpy,tacaswell/numpy,ewmoore/numpy,bmorris3/numpy,chatcannon/numpy,argriffing/numpy,kirillzhuravlev/numpy,seberg/numpy,CMartelLML/numpy,ogrisel/numpy,drasmuss/numpy,MSeifert04/numpy,ogrisel/numpy,jschueller/numpy,abalkin/numpy,bertrand-l/numpy,skwbc/numpy,Srisai85/numpy,gfyoung/numpy,pdebuyl/numpy,pizzathief/numpy,simongibbons/numpy,stefanv/numpy,numpy/numpy-refactor,pyparallel/numpy,astrofrog/numpy,GaZ3ll3/numpy,dwillmer/numpy,mindw/numpy,pizzathief/numpy,ekalosak/numpy,joferkington/numpy,rmcgibbo/numpy,rajathkumarmp/numpy,nguyentu1602/numpy,MSeifert04/numpy,mhvk/numpy,leifdenby/numpy,ESSS/numpy,jankoslavic/numpy,mwiebe/numpy,charris/numpy,brandon-rhodes/numpy,anntzer/numpy,solarjoe/numpy,NextThought/pypy-numpy,ogrisel/numpy,SunghanKim/numpy,mortada/numpy,jorisvandenbossche/numpy,maniteja123/numpy,kirillzhuravlev/numpy,shoyer/numpy,dimasad/numpy,kiwifb/numpy,tdsmith/numpy,gmcastil/numpy,simongibbons/numpy,jankoslavic/numpy,githubmlai/numpy,pdebuyl/numpy,behzadnouri/numpy,naritta/numpy,simongibbons/numpy,endolith/numpy,chiffa/numpy,jonathanunderwood/numpy,tdsmith/numpy,Yusa95/numpy,anntzer/numpy,pdebuyl/numpy,sigma-random/numpy,dwf/numpy,Dapid/numpy,pelson/numpy,naritta/numpy,matthew-brett/numpy,mingwpy/numpy,drasmuss/numpy,Srisai85/numpy,charris/numpy,charris/numpy,gmcastil/numpy,simongibbons/numpy,mattip/numpy,ewmoore/numpy,shoyer
/numpy,ddasilva/numpy,embray/numpy,felipebetancur/numpy,empeeu/numpy,dch312/numpy,ChanderG/numpy,pelson/numpy,dimasad/numpy,mathdd/numpy,shoyer/numpy,argriffing/numpy,ajdawson/numpy,has2k1/numpy,Dapid/numpy,larsmans/numpy,chiffa/numpy,dwf/numpy,andsor/numpy,Yusa95/numpy,jakirkham/numpy,maniteja123/numpy,ahaldane/numpy,MaPePeR/numpy,endolith/numpy,pbrod/numpy,mattip/numpy,BMJHayward/numpy,pizzathief/numpy,utke1/numpy,cjermain/numpy,sinhrks/numpy,jakirkham/numpy,mattip/numpy,WarrenWeckesser/numpy,MSeifert04/numpy,chatcannon/numpy,ekalosak/numpy,ajdawson/numpy,matthew-brett/numpy,dch312/numpy,GrimDerp/numpy,cowlicks/numpy,pizzathief/numpy,mhvk/numpy,NextThought/pypy-numpy,mwiebe/numpy,mindw/numpy,Eric89GXL/numpy,numpy/numpy-refactor,pbrod/numpy,dwillmer/numpy,GrimDerp/numpy,cowlicks/numpy,Anwesh43/numpy,stuarteberg/numpy,ViralLeadership/numpy,Linkid/numpy,nguyentu1602/numpy,felipebetancur/numpy,gfyoung/numpy,ContinuumIO/numpy,jschueller/numpy,stefanv/numpy,SiccarPoint/numpy,mhvk/numpy,cowlicks/numpy,GrimDerp/numpy,ahaldane/numpy,larsmans/numpy,naritta/numpy,embray/numpy,ChanderG/numpy,rudimeier/numpy,pbrod/numpy,immerrr/numpy,moreati/numpy,jonathanunderwood/numpy,groutr/numpy,Linkid/numpy,dwillmer/numpy,MSeifert04/numpy,jschueller/numpy,rhythmsosad/numpy,ogrisel/numpy,pbrod/numpy,grlee77/numpy,b-carter/numpy,Linkid/numpy,stuarteberg/numpy,ogrisel/numpy,joferkington/numpy,rudimeier/numpy,yiakwy/numpy,trankmichael/numpy,GaZ3ll3/numpy,felipebetancur/numpy,endolith/numpy,endolith/numpy,mingwpy/numpy,mathdd/numpy,numpy/numpy,musically-ut/numpy,tacaswell/numpy,rherault-insa/numpy
|
---
+++
@@ -1,5 +1,6 @@
import os
from distutils.core import Command
+from distutils.ccompiler import new_compiler
from numpy.distutils.misc_util import get_cmd
class install_clib(Command):
@@ -16,7 +17,6 @@
def run (self):
# We need the compiler to get the library name -> filename association
- from distutils.ccompiler import new_compiler
compiler = new_compiler(compiler=None)
compiler.customize(self.distribution)
|
1d96f8a1456d902fe5a9e6ce5410a41b1468a810
|
thinglang/parser/tokens/logic.py
|
thinglang/parser/tokens/logic.py
|
from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
COMPARATORS = {
LexicalEquality: lambda lhs, rhs: lhs == rhs
}
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.lhs, self.comparator, self.rhs = slice
self.comparator = self.COMPARATORS[type(self.comparator)]
def describe(self):
return 'if {} {} {}'.format(self.lhs, self.comparator, self.rhs)
def evaluate(self, stack):
return self.comparator(self.lhs.evaluate(stack), self.rhs.evaluate(stack))
class UnconditionalElse(BaseToken):
pass pass
class ConditionalElse(Conditional):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
|
from thinglang.lexer.symbols.logic import LexicalEquality
from thinglang.parser.tokens import BaseToken
class Conditional(BaseToken):
ADVANCE = False
def __init__(self, slice):
super(Conditional, self).__init__(slice)
_, self.value = slice
def describe(self):
return 'if {}'.format(self.value)
def evaluate(self, stack):
return self.value.evaluate(stack)
class UnconditionalElse(BaseToken):
pass
class ConditionalElse(Conditional):
def __init__(self, slice):
super(ConditionalElse, self).__init__(slice)
_, self.conditional = slice
def describe(self):
return 'otherwise if {}'.format(self.value)
|
Complete migration from conditional to arithmetic operation
|
Complete migration from conditional to arithmetic operation
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
---
+++
@@ -5,24 +5,20 @@
class Conditional(BaseToken):
ADVANCE = False
- COMPARATORS = {
- LexicalEquality: lambda lhs, rhs: lhs == rhs
- }
def __init__(self, slice):
super(Conditional, self).__init__(slice)
- _, self.lhs, self.comparator, self.rhs = slice
- self.comparator = self.COMPARATORS[type(self.comparator)]
+ _, self.value = slice
def describe(self):
- return 'if {} {} {}'.format(self.lhs, self.comparator, self.rhs)
+ return 'if {}'.format(self.value)
def evaluate(self, stack):
- return self.comparator(self.lhs.evaluate(stack), self.rhs.evaluate(stack))
+ return self.value.evaluate(stack)
class UnconditionalElse(BaseToken):
- pass pass
+ pass
class ConditionalElse(Conditional):
|
1f4d1fb8e8923609aae8e90d186d4e0e3ca40329
|
notifications/rest/bindings/list-binding/list-binding.6.x.py
|
notifications/rest/bindings/list-binding/list-binding.6.x.py
|
# NOTE: This example uses the next generation Twilio helper library - for more
# information on how to download and install this version, visit
# https://www.twilio.com/docs/libraries/python
from twilio.rest import Client
from datetime import datetime
# Your Account Sid and Auth Token from twilio.com/user/account
account = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
token = "your_auth_token"
client = Client(account, token)
bindings = client.notify.services("ISxxx").bindings.list(
tag="premium",
start_date=datetime.strptime("2015-08-25", "%Y-%m-%d")
)
for binding in bindings:
print(binding.sid)
|
# NOTE: This example uses the next generation Twilio helper library - for more
# information on how to download and install this version, visit
# https://www.twilio.com/docs/libraries/python
from twilio.rest import Client
from datetime import datetime
# Your Account Sid and Auth Token from twilio.com/user/account
account = "ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX"
token = "your_auth_token"
client = Client(account, token)
bindings = client.notify.services("ISxxx").bindings.list(
tag="new user",
start_date=datetime.strptime("2015-08-25", "%Y-%m-%d")
)
for binding in bindings:
print(binding.sid)
|
Update .py list-binding API snippet with 'new user' tag
|
Update .py list-binding API snippet with 'new user' tag
|
Python
|
mit
|
TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,TwilioDevEd/api-snippets,TwilioDevEd/api-snippets,teoreteetik/api-snippets,teoreteetik/api-snippets
|
---
+++
@@ -10,7 +10,7 @@
client = Client(account, token)
bindings = client.notify.services("ISxxx").bindings.list(
- tag="premium",
+ tag="new user",
start_date=datetime.strptime("2015-08-25", "%Y-%m-%d")
)
|
0d9085b0335fe52a8d9682d18c64b540e5a5e71e
|
tests/backends/gstreamer_test.py
|
tests/backends/gstreamer_test.py
|
import unittest
import os
from mopidy.models import Playlist, Track
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends.base import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
folder = os.path.dirname(__file__)
folder = os.path.join(folder, '..', 'data')
folder = os.path.abspath(folder)
song = os.path.join(folder, 'song%s.wav')
song = 'file://' + song
# FIXME can be switched to generic test
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
tracks = [Track(uri=song % i, id=i, length=4464) for i in range(1, 4)]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
tracks = [Track(uri=song % i, id=i, length=4464) for i in range(1, 4)]
backend_class = GStreamerBackend
if __name__ == '__main__':
unittest.main()
|
import unittest
import os
from mopidy.models import Playlist, Track
from mopidy.backends.gstreamer import GStreamerBackend
from tests.backends.base import (BasePlaybackControllerTest,
BaseCurrentPlaylistControllerTest)
folder = os.path.dirname(__file__)
folder = os.path.join(folder, '..', 'data')
folder = os.path.abspath(folder)
song = os.path.join(folder, 'song%s.wav')
song = 'file://' + song
# FIXME can be switched to generic test
class GStreamerCurrentPlaylistHandlerTest(BaseCurrentPlaylistControllerTest, unittest.TestCase):
tracks = [Track(uri=song % i, id=i, length=4464) for i in range(1, 4)]
backend_class = GStreamerBackend
class GStreamerPlaybackControllerTest(BasePlaybackControllerTest, unittest.TestCase):
tracks = [Track(uri=song % i, id=i, length=4464) for i in range(1, 4)]
backend_class = GStreamerBackend
def add_track(self, file):
uri = 'file://' + os.path.join(folder, file)
track = Track(uri=uri, id=1, length=4464)
self.backend.current_playlist.add(track)
def test_play_mp3(self):
self.add_track('blank.mp3')
self.playback.play()
self.assertEqual(self.playback.state, self.playback.PLAYING)
def test_play_ogg(self):
self.add_track('blank.ogg')
self.playback.play()
self.assertEqual(self.playback.state, self.playback.PLAYING)
def test_play_flac(self):
self.add_track('blank.flac')
self.playback.play()
self.assertEqual(self.playback.state, self.playback.PLAYING)
if __name__ == '__main__':
unittest.main()
|
Add test for mp3, ogg and flac support in gstreamer backend
|
Add test for mp3, ogg and flac support in gstreamer backend
|
Python
|
apache-2.0
|
vrs01/mopidy,jcass77/mopidy,kingosticks/mopidy,bacontext/mopidy,mokieyue/mopidy,jodal/mopidy,liamw9534/mopidy,mopidy/mopidy,liamw9534/mopidy,diandiankan/mopidy,jodal/mopidy,mokieyue/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,quartz55/mopidy,adamcik/mopidy,jmarsik/mopidy,jcass77/mopidy,abarisain/mopidy,bencevans/mopidy,glogiotatidis/mopidy,bencevans/mopidy,priestd09/mopidy,tkem/mopidy,priestd09/mopidy,dbrgn/mopidy,rawdlite/mopidy,jmarsik/mopidy,vrs01/mopidy,pacificIT/mopidy,mopidy/mopidy,tkem/mopidy,kingosticks/mopidy,vrs01/mopidy,mokieyue/mopidy,woutervanwijk/mopidy,ali/mopidy,SuperStarPL/mopidy,diandiankan/mopidy,bacontext/mopidy,ali/mopidy,woutervanwijk/mopidy,rawdlite/mopidy,rawdlite/mopidy,vrs01/mopidy,kingosticks/mopidy,adamcik/mopidy,bacontext/mopidy,hkariti/mopidy,mokieyue/mopidy,tkem/mopidy,pacificIT/mopidy,swak/mopidy,ZenithDK/mopidy,bacontext/mopidy,priestd09/mopidy,abarisain/mopidy,jcass77/mopidy,hkariti/mopidy,tkem/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,bencevans/mopidy,hkariti/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,ali/mopidy,diandiankan/mopidy,jmarsik/mopidy,quartz55/mopidy,ZenithDK/mopidy,swak/mopidy,ZenithDK/mopidy,dbrgn/mopidy,adamcik/mopidy,jodal/mopidy,quartz55/mopidy,quartz55/mopidy,jmarsik/mopidy,ali/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mopidy/mopidy,bencevans/mopidy,swak/mopidy,swak/mopidy,dbrgn/mopidy,ZenithDK/mopidy,rawdlite/mopidy,SuperStarPL/mopidy,dbrgn/mopidy,hkariti/mopidy
|
---
+++
@@ -24,5 +24,25 @@
backend_class = GStreamerBackend
+ def add_track(self, file):
+ uri = 'file://' + os.path.join(folder, file)
+ track = Track(uri=uri, id=1, length=4464)
+ self.backend.current_playlist.add(track)
+
+ def test_play_mp3(self):
+ self.add_track('blank.mp3')
+ self.playback.play()
+ self.assertEqual(self.playback.state, self.playback.PLAYING)
+
+ def test_play_ogg(self):
+ self.add_track('blank.ogg')
+ self.playback.play()
+ self.assertEqual(self.playback.state, self.playback.PLAYING)
+
+ def test_play_flac(self):
+ self.add_track('blank.flac')
+ self.playback.play()
+ self.assertEqual(self.playback.state, self.playback.PLAYING)
+
if __name__ == '__main__':
unittest.main()
|
5606e8d56b7f6441eeb121795b0f400d65858b3b
|
tests/integration/test_fanout.py
|
tests/integration/test_fanout.py
|
import uuid
import diesel
from diesel.util.queue import Fanout
class FanoutHarness(object):
def setup(self):
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
diesel.fork(self.subscriber)
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
diesel.sleep()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
assert len(self.subscriber_data) == 10
for values in self.subscriber_data.itervalues():
assert values == range(10), values
def test_sub_is_removed_after_it_is_done(self):
assert not self.fan.subs
|
import uuid
import diesel
from diesel.util.queue import Fanout
from diesel.util.event import Countdown
class FanoutHarness(object):
def setup(self):
self.done = Countdown(10)
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
diesel.fork(self.subscriber)
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
self.done.wait()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
self.done.tick()
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
assert len(self.subscriber_data) == 10
for values in self.subscriber_data.itervalues():
assert values == range(10), values
def test_sub_is_removed_after_it_is_done(self):
assert not self.fan.subs
|
Add "done" tracking for fanout test.
|
Add "done" tracking for fanout test.
|
Python
|
bsd-3-clause
|
dieseldev/diesel
|
---
+++
@@ -3,9 +3,11 @@
import diesel
from diesel.util.queue import Fanout
+from diesel.util.event import Countdown
class FanoutHarness(object):
def setup(self):
+ self.done = Countdown(10)
self.fan = Fanout()
self.subscriber_data = {}
for x in xrange(10):
@@ -13,13 +15,14 @@
diesel.sleep()
for i in xrange(10):
self.fan.pub(i)
- diesel.sleep()
+ self.done.wait()
def subscriber(self):
self.subscriber_data[uuid.uuid4()] = data = []
with self.fan.sub() as q:
for i in xrange(10):
data.append(q.get())
+ self.done.tick()
class TestFanout(FanoutHarness):
def test_all_subscribers_get_the_published_messages(self):
|
76deb311dbb981501a1fa2686ec2cf4c92d7b83b
|
taggit/admin.py
|
taggit/admin.py
|
from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin)
|
from django.contrib import admin
from taggit.models import Tag, TaggedItem
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
TaggedItemInline
]
ordering = ['name']
admin.site.register(Tag, TagAdmin)
|
Remove extra inlines from django-taggit
|
Remove extra inlines from django-taggit
|
Python
|
bsd-3-clause
|
theatlantic/django-taggit,theatlantic/django-taggit2,theatlantic/django-taggit2,theatlantic/django-taggit,decibyte/django-taggit,decibyte/django-taggit
|
---
+++
@@ -5,6 +5,7 @@
class TaggedItemInline(admin.StackedInline):
model = TaggedItem
+ extra = 0
class TagAdmin(admin.ModelAdmin):
inlines = [
|
1862317c3b463704c8264f71007e7b910772b44e
|
tests/test_pprint.py
|
tests/test_pprint.py
|
import pytest
from mappyfile.pprint import PrettyPrinter
def test_print_map():
mf = {}
pp = PrettyPrinter() # expected
txt = pp.pprint(mf)
assert(expected == txt)
def run_tests():
#pytest.main(["tests/test_pprint.py::test_print_map"])
pytest.main(["tests/test_pprint.py"])
if __name__ == "__main__":
run_tests()
|
import pytest
from mappyfile.pprint import PrettyPrinter
import mappyfile
def test_format_list():
s = """
CLASS
STYLE
COLOR 173 216 230
END
STYLE
OUTLINECOLOR 2 2 2
WIDTH 1
LINECAP BUTT
PATTERN
5 5
10 10
END
END
END
"""
ast = mappyfile.loads(s)
#print ast
pp = PrettyPrinter(indent=0) # expected
k = "pattern"
lst = [[5, 5, 10, 10]]
assert(pp.is_paired_list(k))
r = pp.process_list(k, lst, 0)
exp = [u'PATTERN', '5 5\n10 10', u'END']
assert(r == exp)
def run_tests():
pytest.main(["tests/test_pprint.py::test_format_list"])
#pytest.main(["tests/test_pprint.py"])
if __name__ == "__main__":
#run_tests()
test_format_list()
|
Add pair list formatting test
|
Add pair list formatting test
|
Python
|
mit
|
Jenselme/mappyfile,geographika/mappyfile,geographika/mappyfile
|
---
+++
@@ -1,16 +1,43 @@
import pytest
from mappyfile.pprint import PrettyPrinter
+import mappyfile
-def test_print_map():
- mf = {}
- pp = PrettyPrinter() # expected
- txt = pp.pprint(mf)
- assert(expected == txt)
+def test_format_list():
+
+ s = """
+ CLASS
+ STYLE
+ COLOR 173 216 230
+ END
+ STYLE
+ OUTLINECOLOR 2 2 2
+ WIDTH 1
+ LINECAP BUTT
+ PATTERN
+ 5 5
+ 10 10
+ END
+ END
+ END
+ """
+
+ ast = mappyfile.loads(s)
+ #print ast
+
+ pp = PrettyPrinter(indent=0) # expected
+
+ k = "pattern"
+ lst = [[5, 5, 10, 10]]
+
+ assert(pp.is_paired_list(k))
+ r = pp.process_list(k, lst, 0)
+ exp = [u'PATTERN', '5 5\n10 10', u'END']
+ assert(r == exp)
def run_tests():
- #pytest.main(["tests/test_pprint.py::test_print_map"])
- pytest.main(["tests/test_pprint.py"])
+ pytest.main(["tests/test_pprint.py::test_format_list"])
+ #pytest.main(["tests/test_pprint.py"])
if __name__ == "__main__":
- run_tests()
-
+ #run_tests()
+ test_format_list()
|
207bb83a7a41a36dbe27cb4b75f93fe0ae3a5625
|
sgext/util/aws.py
|
sgext/util/aws.py
|
# -*- coding: utf-8 -*-
#
# © 2011 SimpleGeo, Inc. All rights reserved.
# Author: Paul Lathrop <paul@simplegeo.com>
#
"""Utility functions for AWS-related tasks."""
from getpass import getpass
import boto.pyami.config as boto_config
def get_credentials(batch=False):
"""Return a tuple (key, secret) of AWS credentials. Credentials
are loaded via boto first (which checks the environment and a
couple well-known files). If boto cannot find any credentials, and
the 'batch' kwarg is set to False, this method will request
credentials from the user interactively via the console."""
config = boto_config.Config()
key = config.get('Credentials', 'aws_access_key_id', False)
secret = config.get('Credentials', 'aws_secret_access_key', False)
if key and secret:
return (key, secret)
if batch:
return None
return prompt_for_credentials()
def prompt_for_credentials():
"""Prompt the user to enter their AWS credentials, and return them
as a tuple of (key, secret)."""
print 'Could not load AWS credentials from environment or boto configuration.'
print 'Please enter your AWS credentials.'
print
key = raw_input('AWS Access Key ID: ')
secret = getpass('AWS Secret Access Key: ')
return (key, secret)
|
# -*- coding: utf-8 -*-
#
# © 2011 SimpleGeo, Inc. All rights reserved.
# Author: Paul Lathrop <paul@simplegeo.com>
#
"""Utility functions for AWS-related tasks."""
from getpass import getpass
import boto.pyami.config as boto_config
def get_credentials(batch=False):
    """Return a dictionary of AWS credentials. Credentials are loaded
    via boto first (which checks the environment and a couple
    well-known files). If boto cannot find any credentials, and the
    'batch' kwarg is set to False, this method will request
    credentials from the user interactively via the console.

    :param batch: when True, never prompt; return None if boto finds nothing.
    :return: dict with 'aws_access_key_id' and 'aws_secret_access_key',
        or None in batch mode with no credentials available.
    """
    config = boto_config.Config()
    # Third arg (False) is the default returned when the option is absent.
    key = config.get('Credentials', 'aws_access_key_id', False)
    secret = config.get('Credentials', 'aws_secret_access_key', False)
    if key and secret:
        return {'aws_access_key_id': key,
                'aws_secret_access_key': secret}
    if batch:
        # Non-interactive mode: signal "no credentials" instead of prompting.
        return None
    return prompt_for_credentials()
def prompt_for_credentials():
    """Prompt the user to enter their AWS credentials, and return them
    as a dictionary.

    NOTE(review): Python 2 only (print statements, raw_input).
    The secret is read with getpass so it is not echoed to the console.
    """
    print 'Could not load AWS credentials from environment or boto configuration.'
    print 'Please enter your AWS credentials.'
    print
    key = raw_input('AWS Access Key ID: ')
    secret = getpass('AWS Secret Access Key: ')
    return {'aws_access_key_id': key,
            'aws_secret_access_key': secret}
|
Fix get_credentials so it returns a dict.
|
Fix get_credentials so it returns a dict.
|
Python
|
bsd-2-clause
|
simplegeo/clusto-sgext
|
---
+++
@@ -12,16 +12,17 @@
def get_credentials(batch=False):
- """Return a tuple (key, secret) of AWS credentials. Credentials
- are loaded via boto first (which checks the environment and a
- couple well-known files). If boto cannot find any credentials, and
- the 'batch' kwarg is set to False, this method will request
+ """Return a dictionary of AWS credentials. Credentials are loaded
+ via boto first (which checks the environment and a couple
+ well-known files). If boto cannot find any credentials, and the
+ 'batch' kwarg is set to False, this method will request
credentials from the user interactively via the console."""
config = boto_config.Config()
key = config.get('Credentials', 'aws_access_key_id', False)
secret = config.get('Credentials', 'aws_secret_access_key', False)
if key and secret:
- return (key, secret)
+ return {'aws_access_key_id': key,
+ 'aws_secret_access_key': secret}
if batch:
return None
return prompt_for_credentials()
@@ -29,10 +30,11 @@
def prompt_for_credentials():
"""Prompt the user to enter their AWS credentials, and return them
- as a tuple of (key, secret)."""
+ as a dictionary."""
print 'Could not load AWS credentials from environment or boto configuration.'
print 'Please enter your AWS credentials.'
print
key = raw_input('AWS Access Key ID: ')
secret = getpass('AWS Secret Access Key: ')
- return (key, secret)
+ return {'aws_access_key_id': key,
+ 'aws_secret_access_key': secret}
|
20a92ff1ffe143193d95235c7a5ea8e9edb0df64
|
yowsup/layers/protocol_acks/protocolentities/ack_outgoing.py
|
yowsup/layers/protocol_acks/protocolentities/ack_outgoing.py
|
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity
class OutgoingAckProtocolEntity(AckProtocolEntity):
'''
<ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
'''
def __init__(self, _id, _class, _type, _to):
super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
self.setOutgoingData(_type, _to)
def setOutgoingData(self, _type, _to):
self._type = _type
self._to = _to
def toProtocolTreeNode(self):
node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
if self._type:
node.setAttribute("type", self._type)
node.setAttribute("to", self._to)
return node
def __str__(self):
out = super(OutgoingAckProtocolEntity, self).__str__()
out += "Type: %s\n" % self._type
out += "To: %s\n" % self._to
return out
@staticmethod
def fromProtocolTreeNode(node):
entity = AckProtocolEntity.fromProtocolTreeNode(node)
entity.__class__ = OutgoingAckProtocolEntity
entity.setOutgoingData(
node.getAttributeValue("type"),
node.getAttributeValue("to")
)
return entity
|
from yowsup.structs import ProtocolEntity, ProtocolTreeNode
from .ack import AckProtocolEntity
class OutgoingAckProtocolEntity(AckProtocolEntity):
    '''
    An ack sent by us to the server, acknowledging a received message/receipt.

    <ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
    </ack>

    <ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
    </ack>
    '''
    def __init__(self, _id, _class, _type, _to, _participant = None):
        # _participant is only set for group acks (second form above).
        super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
        self.setOutgoingData(_type, _to, _participant)

    def setOutgoingData(self, _type, _to, _participant):
        """Store the outgoing-only attributes: type, to, participant."""
        self._type = _type
        self._to = _to
        self._participant = _participant

    def toProtocolTreeNode(self):
        """Serialize to a ProtocolTreeNode; empty type/participant are omitted."""
        node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
        if self._type:
            node.setAttribute("type", self._type)
        node.setAttribute("to", self._to)
        if self._participant:
            node.setAttribute("participant", self._participant)
        return node

    def __str__(self):
        out = super(OutgoingAckProtocolEntity, self).__str__()
        out += "Type: %s\n" % self._type
        out += "To: %s\n" % self._to
        if self._participant:
            out += "Participant: %s\n" % self._participant
        return out

    @staticmethod
    def fromProtocolTreeNode(node):
        """Build an entity from a parsed node.

        Parses the base attributes first, then re-classes the instance in
        place (cheap "upgrade" idiom used throughout yowsup) and attaches
        the outgoing-specific attributes.
        """
        entity = AckProtocolEntity.fromProtocolTreeNode(node)
        entity.__class__ = OutgoingAckProtocolEntity
        entity.setOutgoingData(
            node.getAttributeValue("type"),
            node.getAttributeValue("to"),
            node.getAttributeValue("participant")
        )
        return entity
|
Include participant in outgoing ack
|
Include participant in outgoing ack
|
Python
|
mit
|
ongair/yowsup,biji/yowsup
|
---
+++
@@ -5,27 +5,36 @@
'''
<ack type="{{delivery | read}}" class="{{message | receipt | ?}}" id="{{MESSAGE_ID}} to={{TO_JID}}">
</ack>
+
+ <ack to="{{GROUP_JID}}" participant="{{JID}}" id="{{MESSAGE_ID}}" class="receipt" type="{{read | }}">
+ </ack>
+
'''
- def __init__(self, _id, _class, _type, _to):
+ def __init__(self, _id, _class, _type, _to, _participant = None):
super(OutgoingAckProtocolEntity, self).__init__(_id, _class)
- self.setOutgoingData(_type, _to)
+ self.setOutgoingData(_type, _to, _participant)
- def setOutgoingData(self, _type, _to):
+ def setOutgoingData(self, _type, _to, _participant):
self._type = _type
self._to = _to
+ self._participant = _participant
def toProtocolTreeNode(self):
node = super(OutgoingAckProtocolEntity, self).toProtocolTreeNode()
if self._type:
node.setAttribute("type", self._type)
node.setAttribute("to", self._to)
+ if self._participant:
+ node.setAttribute("participant", self._participant)
return node
def __str__(self):
out = super(OutgoingAckProtocolEntity, self).__str__()
out += "Type: %s\n" % self._type
out += "To: %s\n" % self._to
+ if self._participant:
+ out += "Participant: %s\n" % self._participant
return out
@staticmethod
@@ -34,6 +43,7 @@
entity.__class__ = OutgoingAckProtocolEntity
entity.setOutgoingData(
node.getAttributeValue("type"),
- node.getAttributeValue("to")
+ node.getAttributeValue("to"),
+ node.getAttributeValue("participant")
)
return entity
|
30907bbfc1b6d38034867070075a28fd3c5d3c6b
|
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/cli.py
|
{{cookiecutter.repo_name}}/{{cookiecutter.repo_name}}/cli.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
os.environ["KIVY_NO_ARGS"] = "1"
import click
from {{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
@click.command()
@click.option(
'-l', '--language', help='Default language of the App', default='en',
type=click.Choice(['en', 'es', 'de', 'fr'])
)
def main(language):
"""Run {{cookiecutter.app_class_name}} with the given language setting.
"""
{{cookiecutter.app_class_name}}(language).run()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
os.environ["KIVY_NO_ARGS"] = "1"
import click
from {{cookiecutter.repo_name}}.{{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
@click.command()
@click.option(
'-l', '--language', help='Default language of the App', default='en',
type=click.Choice(['en', 'es', 'de', 'fr'])
)
def main(language):
"""Run {{cookiecutter.app_class_name}} with the given language setting.
"""
{{cookiecutter.app_class_name}}(language).run()
|
Use absolute path for app import
|
Use absolute path for app import
|
Python
|
mit
|
hackebrot/cookiedozer,hackebrot/cookiedozer
|
---
+++
@@ -6,7 +6,7 @@
import click
-from {{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
+from {{cookiecutter.repo_name}}.{{cookiecutter.repo_name}} import {{cookiecutter.app_class_name}}
@click.command()
|
f872bd95e9a26b326ea49922a373f90d73d0df2f
|
show_usbcamera.py
|
show_usbcamera.py
|
#! /usr/bin/env python
#
# Show the USB camera
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
|
#! /usr/bin/env python
# -*- coding:utf-8 -*-
#
# Show the images from a USB camera
#
#
# External dependencies
#
import sys
from PySide import QtGui
import VisionToolkit as vtk
#
# Main application
#
if __name__ == '__main__' :
application = QtGui.QApplication( sys.argv )
widget = vtk.UsbCameraWidget()
widget.show()
sys.exit( application.exec_() )
|
Change character coding and a comment.
|
Change character coding and a comment.
|
Python
|
mit
|
microy/StereoVision,microy/VisionToolkit,microy/StereoVision,microy/PyStereoVisionToolkit,microy/PyStereoVisionToolkit,microy/VisionToolkit
|
---
+++
@@ -1,8 +1,9 @@
#! /usr/bin/env python
+# -*- coding:utf-8 -*-
#
-# Show the USB camera
+# Show the images from a USB camera
#
|
fc0ee0d418496f0ec8da01e8bd8e2d12024accaa
|
simulate_loads.py
|
simulate_loads.py
|
import random
import itertools as it
from sklearn.externals import joblib
def simulate_loads(nfrag, ngt, q):
loads = [1] * nfrag
active = set(range(nfrag))
for k in range(1, len(loads)):
i0, i1 = random.sample(active, k=2)
active.remove(i0)
active.remove(i1)
active.add(len(loads))
if random.random() > q: # correct merge
new_load = max(loads[i0], loads[i1])
else: # false merge
new_load = min(loads[i0] + loads[i1], ngt)
loads.append(new_load)
return loads
def many_sims(n_jobs=2):
qs = [.025, .05, .1, .2]
nfrags = [10000, 20000, 40000, 80000, 160000]
nreps = 5
keys = [(n, q) for n, q, i in it.product(nfrags, qs, range(nreps))]
results = joblib.Parallel(n_jobs=n_jobs)(
joblib.delayed(simulate_loads)(n, 1000, q) for n, q in keys
)
return dict(zip(keys, results))
|
import pickle
import random
import itertools as it
from sklearn.externals import joblib
def simulate_loads(nfrag, ngt, q):
    """Simulate segment "loads" under a random agglomerative merge process.

    Starting from ``nfrag`` fragments each with load 1, repeatedly pick two
    active segments uniformly at random and merge them.  With probability
    ``1 - q`` the merge is correct (new load = max of the two), otherwise it
    is a false merge (loads add, capped at ``ngt``).

    :param nfrag: number of initial fragments (each starts with load 1).
    :param ngt: cap applied to the load of a falsely-merged segment
        (presumably the number of ground-truth bodies -- TODO confirm).
    :param q: probability in [0, 1] that a merge is a false merge.
    :return: list of loads for every segment ever created:
        ``nfrag`` initial entries followed by ``nfrag - 1`` merge results.
    """
    loads = [1] * nfrag
    active = set(range(nfrag))
    # nfrag - 1 merges reduce the active set to a single segment.
    for _ in range(1, nfrag):
        # random.sample() rejects sets on Python >= 3.11 (TypeError);
        # sample from an ordered sequence instead.  sorted() keeps the
        # draw deterministic under a fixed random seed.
        i0, i1 = random.sample(sorted(active), 2)
        active.remove(i0)
        active.remove(i1)
        active.add(len(loads))
        if random.random() > q:  # correct merge
            new_load = max(loads[i0], loads[i1])
        else:  # false merge
            new_load = min(loads[i0] + loads[i1], ngt)
        loads.append(new_load)
    return loads
def many_sims(n_jobs=2):
    """Run simulate_loads over a grid of (nfrag, q) settings in parallel.

    :param n_jobs: number of joblib worker processes.
    :return: dict mapping (nfrag, q) -> list of loads.

    NOTE(review): each (nfrag, q) key appears nreps times in ``keys``, so
    ``dict(zip(keys, results))`` keeps only the *last* repetition per
    setting -- confirm whether the repetition index should be in the key.
    """
    qs = [.025, .05, .1, .2]
    nfrags = [10000, 20000, 40000, 80000, 160000]
    nreps = 5
    # Repetition index i is deliberately dropped from the key tuples.
    keys = [(n, q) for n, q, i in it.product(nfrags, qs, range(nreps))]
    results = joblib.Parallel(n_jobs=n_jobs)(
        joblib.delayed(simulate_loads)(n, 1000, q) for n, q in keys
    )
    return dict(zip(keys, results))
if __name__ == '__main__':
    import sys

    # Default to 2 workers (matches many_sims' own default).  Previously
    # n_jobs was only assigned when a CLI argument was given, so running
    # the script with no arguments raised NameError.
    n_jobs = int(sys.argv[1]) if len(sys.argv) > 1 else 2
    result = many_sims(n_jobs)
    # Pickle the {(nfrag, q): loads} dict for later analysis/plotting.
    with open('sim-results.pickle', 'wb') as fout:
        pickle.dump(result, fout, protocol=pickle.HIGHEST_PROTOCOL)
|
Add main script to CSR simulations
|
Add main script to CSR simulations
|
Python
|
bsd-3-clause
|
jni/gala-scripts
|
---
+++
@@ -1,3 +1,4 @@
+import pickle
import random
import itertools as it
from sklearn.externals import joblib
@@ -27,3 +28,12 @@
joblib.delayed(simulate_loads)(n, 1000, q) for n, q in keys
)
return dict(zip(keys, results))
+
+
+if __name__ == '__main__':
+ import sys
+ if len(sys.argv) > 1:
+ n_jobs = int(sys.argv[1])
+ result = many_sims(n_jobs)
+ with open('sim-results.pickle', 'wb') as fout:
+ pickle.dump(result, fout, protocol=pickle.HIGHEST_PROTOCOL)
|
bdae1f203f3d5d600dd62470dea3d2ddb3048b9d
|
turbustat/tests/test_wavelet.py
|
turbustat/tests/test_wavelet.py
|
# Licensed under an MIT open source license - see LICENSE
'''
Test function for Wavelet
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import wt2D, Wavelet_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testWavelet(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_Wavelet_method(self):
self.tester = wt2D(dataset1["integrated_intensity"][0],
dataset1["integrated_intensity"][1])
self.tester.run()
assert np.allclose(self.tester.curve, computed_data['wavelet_val'])
def test_Wavelet_distance(self):
self.tester_dist = \
Wavelet_Distance(dataset1["integrated_intensity"],
dataset2["integrated_intensity"]).distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['wavelet_distance'])
|
# Licensed under an MIT open source license - see LICENSE
'''
Test function for Wavelet
'''
from unittest import TestCase
import numpy as np
import numpy.testing as npt
from ..statistics import Wavelet, Wavelet_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
class testWavelet(TestCase):
def setUp(self):
self.dataset1 = dataset1
self.dataset2 = dataset2
def test_Wavelet_method(self):
self.tester = Wavelet(dataset1["integrated_intensity"][0],
dataset1["integrated_intensity"][1])
self.tester.run()
assert np.allclose(self.tester.curve, computed_data['wavelet_val'])
def test_Wavelet_distance(self):
self.tester_dist = \
Wavelet_Distance(dataset1["integrated_intensity"],
dataset2["integrated_intensity"]).distance_metric()
npt.assert_almost_equal(self.tester_dist.distance,
computed_distances['wavelet_distance'])
|
Change class name in tests
|
Change class name in tests
|
Python
|
mit
|
e-koch/TurbuStat,Astroua/TurbuStat
|
---
+++
@@ -10,7 +10,7 @@
import numpy as np
import numpy.testing as npt
-from ..statistics import wt2D, Wavelet_Distance
+from ..statistics import Wavelet, Wavelet_Distance
from ._testing_data import \
dataset1, dataset2, computed_data, computed_distances
@@ -22,8 +22,8 @@
self.dataset2 = dataset2
def test_Wavelet_method(self):
- self.tester = wt2D(dataset1["integrated_intensity"][0],
- dataset1["integrated_intensity"][1])
+ self.tester = Wavelet(dataset1["integrated_intensity"][0],
+ dataset1["integrated_intensity"][1])
self.tester.run()
assert np.allclose(self.tester.curve, computed_data['wavelet_val'])
|
293f44e211e4f26a0b7eca842dd2af515957a4bd
|
octavia/certificates/generator/cert_gen.py
|
octavia/certificates/generator/cert_gen.py
|
# Copyright (c) 2014 Rackspace US, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Certificate Generator API
"""
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class CertGenerator(object):
"""Base Cert Generator Interface
A Certificate Generator is responsible for signing TLS certificates.
"""
@abc.abstractmethod
def sign_cert(self, csr, validity):
"""Generates a signed certificate from the provided CSR
This call is designed to block until a signed certificate can be
returned.
:param csr: A Certificate Signing Request
:param validity: Valid for <validity> seconds from the current time
:return: Signed certificate
:raises Exception: If certificate signing fails
"""
pass
|
# Copyright (c) 2014 Rackspace US, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Certificate Generator API
"""
import abc
import six
@six.add_metaclass(abc.ABCMeta)
class CertGenerator(object):
"""Base Cert Generator Interface
A Certificate Generator is responsible for generating private keys,
generating CSRs, and signing TLS certificates.
"""
@abc.abstractmethod
def sign_cert(self, csr, validity):
"""Generates a signed certificate from the provided CSR
This call is designed to block until a signed certificate can be
returned.
:param csr: A Certificate Signing Request
:param validity: Valid for <validity> seconds from the current time
:return: PEM Encoded Signed certificate
:raises Exception: If certificate signing fails
"""
pass
@abc.abstractmethod
def generate_cert_key_pair(self, cn, validity, bit_length, passphrase):
"""Generates a private key and certificate pair
:param cn: Common name to use for the Certificate
:param validity: Validity period for the Certificate
:param bit_length: Private key bit length
:param passphrase: Passphrase to use for encrypting the private key
:return: octavia.certificates.common.Cert representation of the
certificate data
:raises Exception: If generation fails
"""
pass
|
Add Cert+PK generation to Certificate Interface
|
Add Cert+PK generation to Certificate Interface
Change-Id: I82aa573c7db13c7a491b18540379b234c1023eb9
|
Python
|
apache-2.0
|
openstack/octavia,openstack/octavia,openstack/octavia
|
---
+++
@@ -25,7 +25,8 @@
class CertGenerator(object):
"""Base Cert Generator Interface
- A Certificate Generator is responsible for signing TLS certificates.
+ A Certificate Generator is responsible for generating private keys,
+ generating CSRs, and signing TLS certificates.
"""
@abc.abstractmethod
@@ -38,7 +39,22 @@
:param csr: A Certificate Signing Request
:param validity: Valid for <validity> seconds from the current time
- :return: Signed certificate
+ :return: PEM Encoded Signed certificate
:raises Exception: If certificate signing fails
"""
pass
+
+ @abc.abstractmethod
+ def generate_cert_key_pair(self, cn, validity, bit_length, passphrase):
+ """Generates a private key and certificate pair
+
+ :param cn: Common name to use for the Certificate
+ :param validity: Validity period for the Certificate
+ :param bit_length: Private key bit length
+ :param passphrase: Passphrase to use for encrypting the private key
+
+ :return: octavia.certificates.common.Cert representation of the
+ certificate data
+ :raises Exception: If generation fails
+ """
+ pass
|
44d103359cff312865f409ff34f528f63e441ef4
|
graphapi/views.py
|
graphapi/views.py
|
from simplekeys.verifier import verify_request
from graphene_django.views import GraphQLView
from django.conf import settings
class KeyedGraphQLView(GraphQLView):
graphiql_template = "graphene/graphiql-keyed.html"
def get_response(self, request, data, show_graphiql=False):
# check key only if we're not handling a graphiql request
# if not show_graphiql:
# error = verify_request(request, 'graphapi')
# if error:
# print('graphapi/views: get_response bailed ')
# return error, error.status_code
return super().get_response(request, data, show_graphiql)
def render_graphiql(self, request, **data):
data['demo_key'] = settings.GRAPHQL_DEMO_KEY
return super().render_graphiql(request, **data)
|
from simplekeys.verifier import verify_request
from graphene_django.views import GraphQLView
from django.conf import settings
class KeyedGraphQLView(GraphQLView):
    """GraphQL endpoint guarded by simplekeys API-key verification.

    GraphiQL browser requests skip verification so the in-browser explorer
    stays usable with the demo key injected into its template context.
    """

    graphiql_template = "graphene/graphiql-keyed.html"

    def get_response(self, request, data, show_graphiql=False):
        # Enforce the API key only for non-GraphiQL requests.
        error = None if show_graphiql else verify_request(request, 'graphapi')
        if error:
            # verify_request produced an HTTP error response; propagate it.
            return error, error.status_code
        return super().get_response(request, data, show_graphiql)

    def render_graphiql(self, request, **data):
        # Expose the demo key to the GraphiQL template.
        data['demo_key'] = settings.GRAPHQL_DEMO_KEY
        return super().render_graphiql(request, **data)
|
Revert "Reimplement using explicit variable lookup"
|
Revert "Reimplement using explicit variable lookup"
This reverts commit 94683e6c
|
Python
|
mit
|
openstates/openstates.org,openstates/openstates.org,openstates/openstates.org,openstates/openstates.org
|
---
+++
@@ -8,11 +8,10 @@
def get_response(self, request, data, show_graphiql=False):
# check key only if we're not handling a graphiql request
- # if not show_graphiql:
- # error = verify_request(request, 'graphapi')
- # if error:
- # print('graphapi/views: get_response bailed ')
- # return error, error.status_code
+ if not show_graphiql:
+ error = verify_request(request, 'graphapi')
+ if error:
+ return error, error.status_code
return super().get_response(request, data, show_graphiql)
|
fc508462e3fa9b03f0ee55df21c44863fbd8bae0
|
tests/providers/phone_number.py
|
tests/providers/phone_number.py
|
# coding=utf-8
from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class TestJaJP(unittest.TestCase):
""" Tests phone_number in the ja_JP locale """
def setUp(self):
self.factory = Factory.create('ja')
def test_phone_number(self):
pn = self.factory.phone_number()
formats = ('070', '080', '090')
assert pn
assert isinstance(pn, string_types)
first, second, third = pn.split('-')
assert first
assert first.isdigit()
assert second
assert second.isdigit()
assert third
assert third.isdigit()
if len(first) == 2:
assert len(second) == 4
assert len(third) == 4
else:
assert len(first) == 3
assert len(second) == 4
assert len(third) == 4
assert first in formats
|
# coding=utf-8
from __future__ import unicode_literals
import unittest
from faker import Factory
from .. import string_types
class TestJaJP(unittest.TestCase):
""" Tests phone_number in the ja_JP locale """
def setUp(self):
self.factory = Factory.create('ja')
def test_phone_number(self):
pn = self.factory.phone_number()
formats = ('070', '080', '090')
assert pn
assert isinstance(pn, string_types)
first, second, third = pn.split('-')
assert first
assert first.isdigit()
assert second
assert second.isdigit()
assert third
assert third.isdigit()
if len(first) == 2:
assert len(second) == 4
assert len(third) == 4
else:
assert len(first) == 3
assert len(second) == 4
assert len(third) == 4
assert first in formats
class TestMSISDN(unittest.TestCase):
    """Validate MSISDN generation for the pt_br locale."""

    def setUp(self):
        self.factory = Factory.create('pt_br')

    def test_msisdn(self):
        number = self.factory.msisdn()
        # Brazilian country code (55) followed by a known two-digit area code.
        valid_prefixes = (
            '5511', '5521', '5531', '5541', '5551', '5561', '5571', '5581',
        )

        assert number is not None
        assert isinstance(number, string_types)
        assert len(number) == 13
        assert number.isdigit()
        assert number[:4] in valid_prefixes
|
Add the msisdn provider test
|
Add the msisdn provider test
|
Python
|
mit
|
joke2k/faker,danhuss/faker,joke2k/faker
|
---
+++
@@ -35,3 +35,21 @@
assert len(second) == 4
assert len(third) == 4
assert first in formats
+
+
+
+class TestMSISDN(unittest.TestCase):
+ """ Tests MSISDN in the pt_br locale """
+
+ def setUp(self):
+ self.factory = Factory.create('pt_br')
+
+ def test_msisdn(self):
+ msisdn = self.factory.msisdn()
+ formats = ('5511', '5521', '5531', '5541', '5551', '5561', '5571', '5581')
+
+ assert msisdn is not None
+ assert isinstance(msisdn, string_types)
+ assert len(msisdn) == 13
+ assert msisdn.isdigit()
+ assert msisdn[0:4] in formats
|
48081a925d5b69e18a1f04c74cbe98b590e77c5b
|
tests/unit/test_pylama_isort.py
|
tests/unit/test_pylama_isort.py
|
import os
from isort.pylama_isort import Linter
class TestLinter:
instance = Linter()
def test_allow(self):
assert not self.instance.allow("test_case.pyc")
assert not self.instance.allow("test_case.c")
assert self.instance.allow("test_case.py")
def test_run(self, src_dir, tmpdir):
assert not self.instance.run(os.path.join(src_dir, "api.py"))
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
|
import os
from isort.pylama_isort import Linter
class TestLinter:
    """Tests for isort's pylama linter plugin."""

    # Shared Linter instance; the tested methods appear stateless.
    instance = Linter()

    def test_allow(self):
        # Only Python source files should be offered to the linter.
        assert not self.instance.allow("test_case.pyc")
        assert not self.instance.allow("test_case.c")
        assert self.instance.allow("test_case.py")

    def test_run(self, src_dir, tmpdir):
        # src_dir is presumably a project-defined fixture pointing at isort's
        # own (correctly sorted) sources; tmpdir is pytest's builtin fixture.
        assert not self.instance.run(os.path.join(src_dir, "api.py"))

        # Out-of-order imports must be reported.
        incorrect = tmpdir.join("incorrect.py")
        incorrect.write("import b\nimport a\n")
        assert self.instance.run(str(incorrect))

    def test_skip(self, src_dir, tmpdir):
        # An "isort: skip_file" marker suppresses all errors for the file.
        incorrect = tmpdir.join("incorrect.py")
        incorrect.write("# isort: skip_file\nimport b\nimport a\n")
        assert not self.instance.run(str(incorrect))
|
Add a test for skip functionality
|
Add a test for skip functionality
|
Python
|
mit
|
PyCQA/isort,PyCQA/isort
|
---
+++
@@ -17,3 +17,8 @@
incorrect = tmpdir.join("incorrect.py")
incorrect.write("import b\nimport a\n")
assert self.instance.run(str(incorrect))
+
+ def test_skip(self, src_dir, tmpdir):
+ incorrect = tmpdir.join("incorrect.py")
+ incorrect.write("# isort: skip_file\nimport b\nimport a\n")
+ assert not self.instance.run(str(incorrect))
|
ca0d9e9651e51797ad317e54c58a174bcb351610
|
channels/__init__.py
|
channels/__init__.py
|
__version__ = "0.9"
default_app_config = 'channels.apps.ChannelsConfig'
DEFAULT_CHANNEL_LAYER = 'default'
from .asgi import channel_layers # NOQA isort:skip
from .channel import Channel, Group # NOQA isort:skip
|
__version__ = "0.9"

default_app_config = 'channels.apps.ChannelsConfig'
DEFAULT_CHANNEL_LAYER = 'default'

# Keep module-level vars (notably __version__) importable even when Django
# is not installed yet, e.g. while pip/setup.py inspects the package.
try:
    from .asgi import channel_layers  # NOQA isort:skip
    from .channel import Channel, Group  # NOQA isort:skip
except ImportError:  # No django installed, allow vars to be read
    pass
|
Fix version import during pip install
|
Fix version import during pip install
|
Python
|
bsd-3-clause
|
raphael-boucher/channels,Coread/channels,Coread/channels,django/channels,Krukov/channels,andrewgodwin/channels,Krukov/channels,andrewgodwin/django-channels,raiderrobert/channels,linuxlewis/channels
|
---
+++
@@ -3,5 +3,8 @@
default_app_config = 'channels.apps.ChannelsConfig'
DEFAULT_CHANNEL_LAYER = 'default'
-from .asgi import channel_layers # NOQA isort:skip
-from .channel import Channel, Group # NOQA isort:skip
+try:
+ from .asgi import channel_layers # NOQA isort:skip
+ from .channel import Channel, Group # NOQA isort:skip
+except ImportError: # No django installed, allow vars to be read
+ pass
|
b7bb933b23b5b86a2555e5fdd8919f852960a4cb
|
firecares/firestation/management/commands/export-building-fires.py
|
firecares/firestation/management/commands/export-building-fires.py
|
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
"""
This command is used to export data that department heat maps visualize.
"""
help = 'Creates a sql file to export building fires from.'
def handle(self, *args, **options):
vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
sql = """
\COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-pipeline/heatmaps/{id}-building-fires.csv --acl=\"public-read\"' DELIMITER ',' CSV HEADER;
"""
for fd in vals:
self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')
|
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
class Command(BaseCommand):
    """
    This command is used to export data that department heat maps visualize.

    Emits one psql \\COPY statement per fire department; each statement
    joins building fires to their nearest risk-categorized parcel and
    streams the CSV to S3 via the aws CLI.
    """
    help = 'Creates a sql file to export building fires from.'

    def handle(self, *args, **options):
        # Only departments with both an FDID and a state can be matched
        # against incident records.
        vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
        # NOTE(review): fdid/state/id are interpolated directly into SQL and
        # a shell command via str.format. Values come from the
        # FireDepartment table, but confirm they cannot contain quotes
        # before running the generated statements.
        sql = """\COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y, COALESCE(b.risk_category, 'Unknown') as risk_category from buildingfires a left join (SELECT * FROM (SELECT state, fdid, inc_date, inc_no, exp_no, geom, b.parcel_id, b.risk_category, ROW_NUMBER() OVER (PARTITION BY state, fdid, inc_date, inc_no, exp_no, geom ORDER BY st_distance(st_centroid(b.wkb_geometry), a.geom)) AS r FROM (select * from incidentaddress where state='{state}' and fdid='{fdid}') a left join parcel_risk_category_local b on a.geom && b.wkb_geometry) x WHERE x.r = 1) b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-test/{id}-building-fires.csv --acl="public-read"' DELIMITER ',' CSV HEADER;"""
        for fd in vals:
            self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n\n')
|
Update export building fires script.
|
Update export building fires script.
|
Python
|
mit
|
FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares,FireCARES/firecares
|
---
+++
@@ -12,9 +12,8 @@
def handle(self, *args, **options):
vals = FireDepartment.objects.filter(fdid__isnull=False, state__isnull=False).exclude(fdid__exact='')
- sql = """
- \COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y from buildingfires a left join incidentaddress b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-pipeline/heatmaps/{id}-building-fires.csv --acl=\"public-read\"' DELIMITER ',' CSV HEADER;
- """
+ sql = """\COPY (select alarm, a.inc_type, alarms,ff_death, oth_death, ST_X(geom) as x, st_y(geom) as y, COALESCE(b.risk_category, 'Unknown') as risk_category from buildingfires a left join (SELECT * FROM (SELECT state, fdid, inc_date, inc_no, exp_no, geom, b.parcel_id, b.risk_category, ROW_NUMBER() OVER (PARTITION BY state, fdid, inc_date, inc_no, exp_no, geom ORDER BY st_distance(st_centroid(b.wkb_geometry), a.geom)) AS r FROM (select * from incidentaddress where state='{state}' and fdid='{fdid}') a left join parcel_risk_category_local b on a.geom && b.wkb_geometry) x WHERE x.r = 1) b using (state, inc_date, exp_no, fdid, inc_no) where state='{state}' and fdid='{fdid}') to PROGRAM 'aws s3 cp - s3://firecares-test/{id}-building-fires.csv --acl="public-read"' DELIMITER ',' CSV HEADER;"""
for fd in vals:
- self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n')
+ self.stdout.write(sql.format(fdid=fd.fdid, state=fd.state, id=fd.id) + '\n\n')
+
|
289be31dff1bbff200054e5acffc4d6640caaa97
|
civis/utils/_jobs.py
|
civis/utils/_jobs.py
|
from civis import APIClient
from civis.futures import CivisFuture
def run_job(job_id, api_key=None):
"""Run a job.
Parameters
----------
job_id : str or int
The ID of the job.
api_key : str, optional
Your Civis API key. If not given, the :envvar:`CIVIS_API_KEY`
environment variable will be used.
Returns
-------
results : :class:`~civis.futures.CivisFuture`
A `CivisFuture` object.
"""
client = APIClient(api_key=api_key, resources='all')
run = client.jobs.post_runs(job_id)
return CivisFuture(client.jobs.get_runs,
(job_id, run['id']),
api_key=api_key)
|
from civis import APIClient
from civis.futures import CivisFuture
def run_job(job_id, api_key=None):
"""Run a job.
Parameters
----------
job_id : str or int
The ID of the job.
api_key : str, optional
Your Civis API key. If not given, the :envvar:`CIVIS_API_KEY`
environment variable will be used.
Returns
-------
results : :class:`~civis.futures.CivisFuture`
A `CivisFuture` object.
"""
client = APIClient(api_key=api_key, resources='all')
run = client.jobs.post_runs(job_id)
return CivisFuture(client.jobs.get_runs,
(job_id, run['id']),
api_key=api_key,
poll_on_creation=False)
|
Set poll_on_creation to False for run_job
|
Set poll_on_creation to False for run_job
|
Python
|
bsd-3-clause
|
civisanalytics/civis-python
|
---
+++
@@ -22,4 +22,5 @@
run = client.jobs.post_runs(job_id)
return CivisFuture(client.jobs.get_runs,
(job_id, run['id']),
- api_key=api_key)
+ api_key=api_key,
+ poll_on_creation=False)
|
9ff64fa4fe884b8c255737c38e9e6d108f16aec9
|
vinotes/apps/api/permissions.py
|
vinotes/apps/api/permissions.py
|
from rest_framework import permissions
class IsSameUserOrAdmin(permissions.BasePermission):
"""
Custom permissions to only allow same user or admins to view user details.
"""
def has_object_permission(self, request, view, user):
return request.user == user or request.user.is_staff
|
from rest_framework import permissions
class IsSameUserOrAdmin(permissions.BasePermission):
"""
Custom permission to only allow same user or admin to view user details.
"""
def has_object_permission(self, request, view, user):
return request.user == user or request.user.is_staff
|
Update docstring for IsSameUserOrAdmin permission.
|
Update docstring for IsSameUserOrAdmin permission.
|
Python
|
unlicense
|
rcutmore/vinotes-api,rcutmore/vinotes-api
|
---
+++
@@ -3,7 +3,7 @@
class IsSameUserOrAdmin(permissions.BasePermission):
"""
- Custom permissions to only allow same user or admins to view user details.
+ Custom permission to only allow same user or admin to view user details.
"""
def has_object_permission(self, request, view, user):
return request.user == user or request.user.is_staff
|
0ac671d554f322524741a795f4a3250ef705f872
|
server/ec2spotmanager/migrations/0010_extend_instance_types.py
|
server/ec2spotmanager/migrations/0010_extend_instance_types.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-08-24 14:55
from __future__ import unicode_literals
from django.db import migrations, models
import ec2spotmanager.models
class Migration(migrations.Migration):
dependencies = [
('ec2spotmanager', '0009_add_instance_size'),
]
operations = [
migrations.AlterField(
model_name='poolconfiguration',
name='ec2_instance_types',
field=models.CharField(blank=True, max_length=4095, null=True),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-08-24 14:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('ec2spotmanager', '0009_add_instance_size'),
]
operations = [
migrations.AlterField(
model_name='poolconfiguration',
name='ec2_instance_types',
field=models.CharField(blank=True, max_length=4095, null=True),
),
]
|
Fix Flake8 error in migration.
|
Fix Flake8 error in migration.
|
Python
|
mpl-2.0
|
MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager
|
---
+++
@@ -3,7 +3,6 @@
from __future__ import unicode_literals
from django.db import migrations, models
-import ec2spotmanager.models
class Migration(migrations.Migration):
|
0b3a414be19546df348ebca148362bf370c61c15
|
zerver/migrations/0127_disallow_chars_in_stream_and_user_name.py
|
zerver/migrations/0127_disallow_chars_in_stream_and_user_name.py
|
# -*- coding: utf-8 -*-
from django.db import models, migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from typing import Text
def remove_special_chars_from_streamname(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
Stream = apps.get_model('zerver', 'Stream')
NAME_INVALID_CHARS = ['*', '@', '`', '#']
for stream in Stream.objects.all():
if (set(stream.name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
stream.name = stream.name.replace(char, ' ').strip()
while Stream.objects.filter(name__iexact=stream.name, realm=stream.realm).exists():
stream.name = stream.name + '^'
if len(stream.name) > 60:
# extremely unlikely, so just do something valid
stream.name = stream.name[-60:]
stream.save(update_fields=['name'])
def remove_special_chars_from_username(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
UserProfile = apps.get_model('zerver', 'UserProfile')
NAME_INVALID_CHARS = ['*', '`', '>', '"', '@', '#']
for userprofile in UserProfile.objects.all():
if (set(userprofile.full_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.full_name = userprofile.full_name.replace(char, ' ').strip()
userprofile.save(update_fields=['full_name'])
if (set(userprofile.short_name).intersection(NAME_INVALID_CHARS)):
for char in NAME_INVALID_CHARS:
userprofile.short_name = userprofile.short_name.replace(char, ' ').strip()
userprofile.save(update_fields=['short_name'])
class Migration(migrations.Migration):
dependencies = [
('zerver', '0126_prereg_remove_users_without_realm'),
]
operations = [
migrations.RunPython(remove_special_chars_from_streamname),
migrations.RunPython(remove_special_chars_from_username),
]
|
# -*- coding: utf-8 -*-
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('zerver', '0126_prereg_remove_users_without_realm'),
]
operations = [
# There was a migration here, which wasn't ready for wide deployment
# and was backed out. This placeholder is left behind to avoid
# confusing the migration engine on any installs that applied the
# migration. (Fortunately no reverse migration is needed.)
]
|
Revert "migrations: Replace special chars in stream & user names with space."
|
Revert "migrations: Replace special chars in stream & user names with space."
This reverts commit acebd3a5e, as well as a subsequent fixup commit
0975bebac "quick fix: Fix migrations to be linear."
These changes need more work and thought before they're ready to
deploy on any large established Zulip server, such as zulipchat.com.
See discussion on #6534.
In place of the removed migration, leave behind a placeholder so
`manage.py migrate` doesn't get confused on installs where it was
already applied.
|
Python
|
apache-2.0
|
showell/zulip,andersk/zulip,tommyip/zulip,hackerkid/zulip,timabbott/zulip,synicalsyntax/zulip,brainwane/zulip,zulip/zulip,shubhamdhama/zulip,shubhamdhama/zulip,brainwane/zulip,jackrzhang/zulip,eeshangarg/zulip,rishig/zulip,rishig/zulip,eeshangarg/zulip,brainwane/zulip,rishig/zulip,zulip/zulip,rishig/zulip,eeshangarg/zulip,tommyip/zulip,showell/zulip,timabbott/zulip,rht/zulip,andersk/zulip,rht/zulip,kou/zulip,kou/zulip,punchagan/zulip,rht/zulip,showell/zulip,andersk/zulip,hackerkid/zulip,tommyip/zulip,tommyip/zulip,synicalsyntax/zulip,synicalsyntax/zulip,showell/zulip,timabbott/zulip,rht/zulip,brainwane/zulip,showell/zulip,rishig/zulip,dhcrzf/zulip,kou/zulip,zulip/zulip,eeshangarg/zulip,andersk/zulip,kou/zulip,jackrzhang/zulip,punchagan/zulip,hackerkid/zulip,dhcrzf/zulip,synicalsyntax/zulip,punchagan/zulip,shubhamdhama/zulip,rishig/zulip,jackrzhang/zulip,timabbott/zulip,brainwane/zulip,rht/zulip,jackrzhang/zulip,punchagan/zulip,shubhamdhama/zulip,jackrzhang/zulip,zulip/zulip,dhcrzf/zulip,kou/zulip,zulip/zulip,shubhamdhama/zulip,rishig/zulip,showell/zulip,shubhamdhama/zulip,synicalsyntax/zulip,punchagan/zulip,hackerkid/zulip,synicalsyntax/zulip,tommyip/zulip,kou/zulip,brainwane/zulip,rht/zulip,andersk/zulip,dhcrzf/zulip,brainwane/zulip,jackrzhang/zulip,zulip/zulip,rht/zulip,eeshangarg/zulip,dhcrzf/zulip,hackerkid/zulip,dhcrzf/zulip,eeshangarg/zulip,synicalsyntax/zulip,punchagan/zulip,hackerkid/zulip,hackerkid/zulip,zulip/zulip,andersk/zulip,timabbott/zulip,tommyip/zulip,andersk/zulip,eeshangarg/zulip,showell/zulip,jackrzhang/zulip,punchagan/zulip,dhcrzf/zulip,tommyip/zulip,timabbott/zulip,shubhamdhama/zulip,kou/zulip,timabbott/zulip
|
---
+++
@@ -1,39 +1,5 @@
# -*- coding: utf-8 -*-
-from django.db import models, migrations
-from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
-from django.db.migrations.state import StateApps
-from typing import Text
-
-def remove_special_chars_from_streamname(apps, schema_editor):
- # type: (StateApps, DatabaseSchemaEditor) -> None
- Stream = apps.get_model('zerver', 'Stream')
- NAME_INVALID_CHARS = ['*', '@', '`', '#']
- for stream in Stream.objects.all():
- if (set(stream.name).intersection(NAME_INVALID_CHARS)):
- for char in NAME_INVALID_CHARS:
- stream.name = stream.name.replace(char, ' ').strip()
-
- while Stream.objects.filter(name__iexact=stream.name, realm=stream.realm).exists():
- stream.name = stream.name + '^'
- if len(stream.name) > 60:
- # extremely unlikely, so just do something valid
- stream.name = stream.name[-60:]
- stream.save(update_fields=['name'])
-
-def remove_special_chars_from_username(apps, schema_editor):
- # type: (StateApps, DatabaseSchemaEditor) -> None
- UserProfile = apps.get_model('zerver', 'UserProfile')
- NAME_INVALID_CHARS = ['*', '`', '>', '"', '@', '#']
- for userprofile in UserProfile.objects.all():
- if (set(userprofile.full_name).intersection(NAME_INVALID_CHARS)):
- for char in NAME_INVALID_CHARS:
- userprofile.full_name = userprofile.full_name.replace(char, ' ').strip()
- userprofile.save(update_fields=['full_name'])
-
- if (set(userprofile.short_name).intersection(NAME_INVALID_CHARS)):
- for char in NAME_INVALID_CHARS:
- userprofile.short_name = userprofile.short_name.replace(char, ' ').strip()
- userprofile.save(update_fields=['short_name'])
+from django.db import migrations
class Migration(migrations.Migration):
@@ -42,6 +8,8 @@
]
operations = [
- migrations.RunPython(remove_special_chars_from_streamname),
- migrations.RunPython(remove_special_chars_from_username),
+ # There was a migration here, which wasn't ready for wide deployment
+ # and was backed out. This placeholder is left behind to avoid
+ # confusing the migration engine on any installs that applied the
+ # migration. (Fortunately no reverse migration is needed.)
]
|
e28988724edcf2c4d6c0545eb9b8dba50169dc28
|
config/flask_prod.py
|
config/flask_prod.py
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
import os
from rmc.config.flask_base import *
import rmc.shared.secrets as s
JS_DIR = 'js_prod'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
LOG_DIR = '/home/rmc/logs'
LOG_PATH = os.path.join(LOG_DIR, 'server/server.log')
FB_APP_ID = '219309734863464'
FB_APP_SECRET = s.FB_APP_SECRET_PROD
|
Revert "Revert "Revert "do not use minified js on prod"""
|
Revert "Revert "Revert "do not use minified js on prod"""
This reverts commit c7d03f4f8d4d50ce837cf0df446a52b24e891cee.
|
Python
|
mit
|
shakilkanji/rmc,MichalKononenko/rmc,rageandqq/rmc,UWFlow/rmc,JGulbronson/rmc,UWFlow/rmc,MichalKononenko/rmc,sachdevs/rmc,MichalKononenko/rmc,sachdevs/rmc,UWFlow/rmc,sachdevs/rmc,ccqi/rmc,ccqi/rmc,duaayousif/rmc,JGulbronson/rmc,JGulbronson/rmc,MichalKononenko/rmc,duaayousif/rmc,ccqi/rmc,shakilkanji/rmc,JGulbronson/rmc,MichalKononenko/rmc,rageandqq/rmc,shakilkanji/rmc,rageandqq/rmc,UWFlow/rmc,ccqi/rmc,shakilkanji/rmc,ccqi/rmc,sachdevs/rmc,duaayousif/rmc,rageandqq/rmc,rageandqq/rmc,duaayousif/rmc,shakilkanji/rmc,duaayousif/rmc,UWFlow/rmc,sachdevs/rmc,JGulbronson/rmc
|
---
+++
@@ -3,7 +3,7 @@
from rmc.config.flask_base import *
import rmc.shared.secrets as s
-JS_DIR = 'js'
+JS_DIR = 'js_prod'
DEBUG = False
ENV = 'prod'
GA_PROPERTY_ID = 'UA-35073503-1'
|
1b9213f0ab0652da23f498799a19420313ec560b
|
examples/generate-manager-file.py
|
examples/generate-manager-file.py
|
#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
haze = telepathy.client.ConnectionManager(service_name, object_path)
haze_mgr = haze[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
for protocol in haze_mgr.ListProtocols():
print "[Protocol %s]" % protocol
for param in haze_mgr.GetParameters(protocol):
print "param-%s=%s" % (param[0], param[2]),
# FIXME: deal with the "register" flag
if param[1] == 1L:
print "required",
print
print
|
#!/usr/bin/python
import sys
import telepathy
from telepathy.interfaces import CONN_MGR_INTERFACE
if len(sys.argv) >= 2:
manager_name = sys.argv[1]
else:
manager_name = "haze"
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
object = telepathy.client.ConnectionManager(service_name, object_path)
manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
for protocol in manager.ListProtocols():
print "[Protocol %s]" % protocol
for param in manager.GetParameters(protocol):
print "param-%s=%s" % (param[0], param[2]),
# FIXME: deal with the "register" flag
if param[1] == 1L:
print "required",
print
print
|
Rename variables to less haze-specific alternatives
|
Rename variables to less haze-specific alternatives
20070831150511-4210b-48ed5b3cc7b1f15f42885c043a1b9678257902a7.gz
|
Python
|
lgpl-2.1
|
freedesktop-unofficial-mirror/telepathy__telepathy-python,epage/telepathy-python,detrout/telepathy-python,max-posedon/telepathy-python,PabloCastellano/telepathy-python,freedesktop-unofficial-mirror/telepathy__telepathy-python,max-posedon/telepathy-python,epage/telepathy-python,PabloCastellano/telepathy-python,detrout/telepathy-python
|
---
+++
@@ -10,17 +10,17 @@
service_name = "org.freedesktop.Telepathy.ConnectionManager.%s" % manager_name
object_path = "/org/freedesktop/Telepathy/ConnectionManager/%s" % manager_name
-haze = telepathy.client.ConnectionManager(service_name, object_path)
-haze_mgr = haze[CONN_MGR_INTERFACE]
+object = telepathy.client.ConnectionManager(service_name, object_path)
+manager = object[CONN_MGR_INTERFACE]
print "[ConnectionManager]"
print "BusName=%s" % service_name
print "ObjectPath=%s" % object_path
print
-for protocol in haze_mgr.ListProtocols():
+for protocol in manager.ListProtocols():
print "[Protocol %s]" % protocol
- for param in haze_mgr.GetParameters(protocol):
+ for param in manager.GetParameters(protocol):
print "param-%s=%s" % (param[0], param[2]),
# FIXME: deal with the "register" flag
if param[1] == 1L:
|
e1dc2c3e2515daf3aae51242221fab4fbd5c553f
|
aim/db/migration/alembic_migrations/versions/72fa5bce100b_tree_model.py
|
aim/db/migration/alembic_migrations/versions/72fa5bce100b_tree_model.py
|
# Copyright (c) 2016 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Create TenantTree table
Revision ID: 72fa5bce100b
Revises:
Create Date: 2016-03-15 16:29:57.408348
"""
# revision identifiers, used by Alembic.
revision = '72fa5bce100b'
down_revision = '40855b7eb958'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'aim_tenant_trees',
sa.Column('rn', sa.String(64), nullable=False),
sa.Column('root_full_hash', sa.String(64), nullable=False),
sa.Column('tree', sa.LargeBinary, nullable=False),
sa.PrimaryKeyConstraint('rn'))
def downgrade():
pass
|
# Copyright (c) 2016 Cisco Systems
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Create TenantTree table
Revision ID: 72fa5bce100b
Revises:
Create Date: 2016-03-15 16:29:57.408348
"""
# revision identifiers, used by Alembic.
revision = '72fa5bce100b'
down_revision = '40855b7eb958'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'aim_tenant_trees',
sa.Column('tenant_rn', sa.String(64), nullable=False),
sa.Column('root_full_hash', sa.String(64), nullable=False),
sa.Column('tree', sa.LargeBinary, nullable=False),
sa.PrimaryKeyConstraint('tenant_rn'))
def downgrade():
pass
|
Fix db migration for tree model
|
Fix db migration for tree model
|
Python
|
apache-2.0
|
noironetworks/aci-integration-module,noironetworks/aci-integration-module
|
---
+++
@@ -34,10 +34,10 @@
def upgrade():
op.create_table(
'aim_tenant_trees',
- sa.Column('rn', sa.String(64), nullable=False),
+ sa.Column('tenant_rn', sa.String(64), nullable=False),
sa.Column('root_full_hash', sa.String(64), nullable=False),
sa.Column('tree', sa.LargeBinary, nullable=False),
- sa.PrimaryKeyConstraint('rn'))
+ sa.PrimaryKeyConstraint('tenant_rn'))
def downgrade():
|
f2000016a9e2acd4cad28b1ea301620723140a4e
|
sheldon/bot.py
|
sheldon/bot.py
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
from sheldon.utils import logger
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
try:
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
except Exception as error:
logger.error_message('Error with loading config:')
logger.error_message(str(error.__traceback__))
|
# -*- coding: utf-8 -*-
"""
@author: Seva Zhidkov
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
from sheldon.adapter import *
from sheldon.config import *
from sheldon.exceptions import *
from sheldon.manager import *
from sheldon.storage import *
from sheldon.utils import logger
class Sheldon:
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self, command_line_arguments):
"""
Function for loading bot.
:param command_line_arguments: dict, arguments for start script
:return:
"""
self._load_config(command_line_arguments)
def _load_config(self, command_line_arguments):
"""
Сreate and load bot config.
:param command_line_arguments: dict, arguments for creating config:
config-prefix - prefix of environment
variables.
Default - 'SHELDON_'
:return:
"""
# Config class is imported from sheldon.config
try:
if 'config-prefix' in command_line_arguments:
self.config = Config(prefix=command_line_arguments['config-prefix'])
else:
self.config = Config()
except Exception as error:
logger.critical_message('Error with loading config:')
logger.critical_message(str(error.__traceback__))
|
Change error status with config problems to critical
|
Change error status with config problems to critical
|
Python
|
mit
|
lises/sheldon
|
---
+++
@@ -49,5 +49,5 @@
else:
self.config = Config()
except Exception as error:
- logger.error_message('Error with loading config:')
- logger.error_message(str(error.__traceback__))
+ logger.critical_message('Error with loading config:')
+ logger.critical_message(str(error.__traceback__))
|
450cbc7d0cb9a2477b80e94c874de1da4a00e431
|
source/services/rotten_tomatoes_service.py
|
source/services/rotten_tomatoes_service.py
|
import requests
from bs4 import BeautifulSoup
from source.models.rt_rating import RTRating
class RottenTomatoesService:
__URL = 'http://www.rottentomatoes.com/m/'
__SEPERATOR = '_'
def __init__(self, title):
self.title = title
def get_rt_rating(self):
search_url = self.__URL + self.format_title()
movie_page = requests.get(search_url)
contents = movie_page.text
soup = BeautifulSoup(contents, 'lxml')
ratings = self.get_ratings(soup)
ratings.link = search_url
return ratings
def format_title(self):
formatted_title = self.title
if formatted_title.startswith('The '):
formatted_title = formatted_title.replace('The ', '', 1)
if "'s" in formatted_title:
formatted_title = formatted_title.replace("'s", 's')
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
return formatted_title
def get_ratings(self, soup):
items = []
for item in soup.findAll(attrs={'itemprop': 'ratingValue'}):
items.append(item.get_text().strip('%'))
return RTRating(items)
|
import requests
from bs4 import BeautifulSoup
from source.models.rt_rating import RTRating
class RottenTomatoesService:
__URL = 'http://www.rottentomatoes.com/m/'
__SEPERATOR = '_'
def __init__(self, title):
self.title = title
def get_rt_rating(self):
search_url = self.__URL + self.format_title()
movie_page = requests.get(search_url)
contents = movie_page.text
soup = BeautifulSoup(contents, 'lxml')
ratings = self.get_ratings(soup)
ratings.link = search_url
return ratings
def format_title(self):
formatted_title = self.title
if formatted_title.startswith('The '):
formatted_title = formatted_title.replace('The ', '', 1)
if "'s" in formatted_title:
formatted_title = formatted_title.replace("'s", 's')
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
formatted_title = formatted_title.replace(':', '')
return formatted_title
def get_ratings(self, soup):
items = []
for item in soup.findAll(attrs={'itemprop': 'ratingValue'}):
items.append(item.get_text().strip('%'))
return RTRating(items)
|
Remove colon from title for RT search
|
Remove colon from title for RT search
|
Python
|
mit
|
jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu,jeremyrea/caterblu
|
---
+++
@@ -34,6 +34,7 @@
formatted_title = formatted_title.replace(' ', self.__SEPERATOR)
formatted_title = formatted_title.replace('-', '')
+ formatted_title = formatted_title.replace(':', '')
return formatted_title
|
ec244cc9c56fec571502529ef24af2ca18d9f5f5
|
spirit/templatetags/tags/utils/gravatar.py
|
spirit/templatetags/tags/utils/gravatar.py
|
#-*- coding: utf-8 -*-
import hashlib
from django.utils.http import urlencode, urlquote
from .. import register
@register.simple_tag()
def get_gravatar_url(user, size, rating='g', default='identicon'):
url = "http://www.gravatar.com/avatar/"
hash = hashlib.md5(user.email.strip().lower()).hexdigest()
data = urlencode({'d': urlquote(default), 's': str(size), 'r': rating})
return "".join((url, hash, '?', data))
|
#-*- coding: utf-8 -*-
import hashlib
from django.utils.http import urlencode, urlquote
from django.utils.encoding import force_bytes
from .. import register
@register.simple_tag()
def get_gravatar_url(user, size, rating='g', default='identicon'):
url = "http://www.gravatar.com/avatar/"
hash = hashlib.md5(force_bytes(user.email.strip().lower().encode('utf_8'))).hexdigest()
data = urlencode({'d': urlquote(default), 's': str(size), 'r': rating})
return "".join((url, hash, '?', data))
|
Use django utils force_bytes to arguments of hashlib
|
Use django utils force_bytes to arguments of hashlib
|
Python
|
mit
|
alesdotio/Spirit,a-olszewski/Spirit,raybesiga/Spirit,a-olszewski/Spirit,dvreed/Spirit,ramaseshan/Spirit,adiyengar/Spirit,battlecat/Spirit,ramaseshan/Spirit,gogobook/Spirit,dvreed/Spirit,nitely/Spirit,mastak/Spirit,battlecat/Spirit,nitely/Spirit,a-olszewski/Spirit,alesdotio/Spirit,mastak/Spirit,alesdotio/Spirit,gogobook/Spirit,raybesiga/Spirit,adiyengar/Spirit,gogobook/Spirit,dvreed/Spirit,mastak/Spirit,battlecat/Spirit,nitely/Spirit,adiyengar/Spirit,ramaseshan/Spirit,raybesiga/Spirit
|
---
+++
@@ -4,6 +4,7 @@
import hashlib
from django.utils.http import urlencode, urlquote
+from django.utils.encoding import force_bytes
from .. import register
@@ -11,6 +12,6 @@
@register.simple_tag()
def get_gravatar_url(user, size, rating='g', default='identicon'):
url = "http://www.gravatar.com/avatar/"
- hash = hashlib.md5(user.email.strip().lower()).hexdigest()
+ hash = hashlib.md5(force_bytes(user.email.strip().lower().encode('utf_8'))).hexdigest()
data = urlencode({'d': urlquote(default), 's': str(size), 'r': rating})
return "".join((url, hash, '?', data))
|
85d71d8a5d7cdf34c12791b84c9f1bdec4ad1ed1
|
partner_compassion/wizards/portal_wizard.py
|
partner_compassion/wizards/portal_wizard.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
from odoo.tools import email_split
class PortalWizard(models.TransientModel):
_inherit = 'portal.wizard'
class PortalUser(models.TransientModel):
_inherit = 'portal.wizard.user'
@api.multi
def _create_user(self):
"""
Override portal user creation to prevent sending e-mail to new user.
"""
res_users = self.env['res.users'].with_context(
noshortcut=True, no_reset_password=True)
email = email_split(self.email)
if email:
email = email[0]
else:
email = self.partner_id.lastname.lower() + '@cs.local'
values = {
'email': email,
'login': email,
'partner_id': self.partner_id.id,
'groups_id': [(6, 0, [])],
'notify_email': 'none',
}
return res_users.create(values)
@api.multi
def _send_email(self):
""" Never send invitation e-mails. """
return True
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2016 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __manifest__.py
#
##############################################################################
from odoo import api, models
from odoo.tools import email_split
class PortalWizard(models.TransientModel):
_inherit = 'portal.wizard'
class PortalUser(models.TransientModel):
_inherit = 'portal.wizard.user'
@api.multi
def _create_user(self):
"""
Override portal user creation to prevent sending e-mail to new user.
"""
res_users = self.env['res.users'].with_context(
noshortcut=True, no_reset_password=True)
email = email_split(self.email)
if email:
email = email[0]
else:
email = self.partner_id.lastname.lower() + '@cs.local'
values = {
'email': email,
'login': email,
'partner_id': self.partner_id.id,
'groups_id': [(6, 0, [])],
'notify_email': 'none',
}
res = res_users.create(values)
res.notify_email = 'always'
return res
@api.multi
def _send_email(self):
""" Never send invitation e-mails. """
return True
|
Fix bug, set notify_email to always after create portal user
|
Fix bug, set notify_email to always after create portal user
|
Python
|
agpl-3.0
|
ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,ecino/compassion-switzerland
|
---
+++
@@ -39,7 +39,9 @@
'groups_id': [(6, 0, [])],
'notify_email': 'none',
}
- return res_users.create(values)
+ res = res_users.create(values)
+ res.notify_email = 'always'
+ return res
@api.multi
def _send_email(self):
|
8e3beda427e2edc2bc7b8f7f96f8ef1c7dd571c7
|
downloads/urls.py
|
downloads/urls.py
|
from django.conf.urls import patterns, url
from django.utils.translation import ugettext_lazy as _
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(_(r'solutions/(?P<path>.*)$'), download_protected_file,
dict(path_prefix='solutions/', model_class=UserSolution),
name='download_solution'),
)
|
from django.conf.urls import patterns, url
from django.utils.translation import ugettext_lazy as _
from problems.models import UserSolution
from .views import download_protected_file
urlpatterns = patterns('',
url(_(r'solutions/(?P<path>.*)$'), download_protected_file,
dict(path_prefix='solutions/', model_class=UserSolution),
name='download_solution'),
url(_(r'corrected_solutions/(?P<path>.*)$'), download_protected_file,
dict(path_prefix='corrected_solutions/', model_class=UserSolution),
name='download_corrected_solution'),
)
|
Add view for displaying corrected solutions
|
downloads: Add view for displaying corrected solutions
|
Python
|
mit
|
matus-stehlik/roots,tbabej/roots,tbabej/roots,matus-stehlik/roots,rtrembecky/roots,matus-stehlik/roots,rtrembecky/roots,tbabej/roots,rtrembecky/roots
|
---
+++
@@ -9,4 +9,7 @@
url(_(r'solutions/(?P<path>.*)$'), download_protected_file,
dict(path_prefix='solutions/', model_class=UserSolution),
name='download_solution'),
+ url(_(r'corrected_solutions/(?P<path>.*)$'), download_protected_file,
+ dict(path_prefix='corrected_solutions/', model_class=UserSolution),
+ name='download_corrected_solution'),
)
|
390851ce7c606e803094487e6278ea5620d26f3c
|
src/python/vff.py
|
src/python/vff.py
|
"""Show a command to edit fred files"""
import os
import sys
from dotsite.paths import makepath, pwd
def get_freds(paths):
if not paths:
paths = ['.']
result = set()
for path in paths:
path = makepath(path)
if path.isdir():
result |= {p for p in path.files('fred*') if p[-1] != '~'}
elif path.isfile() and path.name.startswith('fred'):
result.add(path)
return [pwd().relpathto(p) for p in result]
def main(args):
freds = get_freds(args)
if not freds:
return not os.EX_OK
print 'v %s' % ' '.join(freds)
return os.EX_OK
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
"""Show a command to edit fred files"""
import os
import sys
from dotsite.paths import makepath, pwd
def get_freds(paths):
if not paths:
paths = ['~/tmp']
result = set()
for path in paths:
path = makepath(path)
if path.isdir():
result |= {p for p in path.files('fred*') if p[-1] != '~'}
elif path.isfile() and path.name.startswith('fred'):
result.add(path)
return [pwd().relpathto(p) for p in result]
def main(args):
freds = get_freds(args)
if not freds:
return not os.EX_OK
print 'v %s' % ' '.join(freds)
return os.EX_OK
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
Put temp files in ~/tmp by default
|
Put temp files in ~/tmp by default
|
Python
|
mit
|
jalanb/jab,jalanb/dotjab,jalanb/jab,jalanb/dotjab
|
---
+++
@@ -9,7 +9,7 @@
def get_freds(paths):
if not paths:
- paths = ['.']
+ paths = ['~/tmp']
result = set()
for path in paths:
path = makepath(path)
|
5dece3e052a1bebda6b1e1af63855399435f2e49
|
src/mailflow/settings.py
|
src/mailflow/settings.py
|
SQLALCHEMY_DATABASE_URI = "postgres://@/mailflow"
SECRET_KEY = 'Chi6riup1gaetiengaShoh=Wey1pohph0ieDaes7eeph'
INBOX_LOGIN_LENGTH = 16
INBOX_PASSWORD_LENGTH = 16
INBOX_PAGE_SIZE = 50
INBOX_HOST = 'mailflow.openpz.org'
INBOX_PORT = 25
RAW_EMAIL_FOLDER = "/var/tmp"
RABBITMQ_URI = 'amqp://mailflow:youneverguess@localhost//mail'
RABBITMQ_EXCHANGE_NAME = 'mail'
RABBITMQ_MAIL_QUEUE_PREFIX = 'newmail'
|
SQLALCHEMY_DATABASE_URI = "postgres://@/mailflow"
SECRET_KEY = 'Chi6riup1gaetiengaShoh=Wey1pohph0ieDaes7eeph'
INBOX_LOGIN_LENGTH = 16
INBOX_PASSWORD_LENGTH = 16
INBOX_PAGE_SIZE = 50
INBOX_HOST = 'mailflow.openpz.org'
INBOX_PORT = 25
RAW_EMAIL_FOLDER = "/var/tmp"
RABBITMQ_URI = 'amqp://mailflow:youneverguess@localhost//mail'
RABBITMQ_EXCHANGE_NAME = 'mail'
RABBITMQ_MAIL_QUEUE_PREFIX = 'newmail'
WTF_CSRF_TIME_LIMIT = 21600
|
Set CSRF token expirtion time to 6 hours
|
Set CSRF token expirtion time to 6 hours
|
Python
|
apache-2.0
|
zzzombat/mailflow,zzzombat/mailflow
|
---
+++
@@ -13,3 +13,5 @@
RABBITMQ_URI = 'amqp://mailflow:youneverguess@localhost//mail'
RABBITMQ_EXCHANGE_NAME = 'mail'
RABBITMQ_MAIL_QUEUE_PREFIX = 'newmail'
+
+WTF_CSRF_TIME_LIMIT = 21600
|
a93b8986f1cd2c64b75abd9bf695ff6e44a85b29
|
scripts/travis_build_dependent_projects.py
|
scripts/travis_build_dependent_projects.py
|
# -*- coding: utf-8 -*-
import os
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
print "Enumerate repos on ", domain
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
print u"Checking repo: {}\n{}".format(repo.slug, repo.description)
try:
build = conn.build(repo.last_build_id)
if "kitrun.py" in build.config.get("script", [""])[0]:
print "Found drift project: ", repo.slug
if not build.running:
print "Restarting..."
build.restart()
restarted.append(repo.slug)
else:
print "Build is already running!"
building.append(repo.slug)
else:
print "Not a drift based project."
except Exception as e:
print "Can't build repo: ", e
print ""
if restarted:
print "Repos restarted:"
for reponame in restarted:
print "\t", reponame
else:
print "No builds restarted."
if building:
print "Repos already building:"
for reponame in building:
print "\t", reponame
if __name__ == "__main__":
main()
|
# -*- coding: utf-8 -*-
import os
from travispy import travispy
from travispy import TravisPy
def main():
restarted = []
building = []
for domain in [travispy.PUBLIC, travispy.PRIVATE]:
print "Enumerate repos on ", domain
conn = TravisPy.github_auth(os.environ['GITHUB_KEY'], domain)
user = conn.user()
repos = conn.repos(member=user.login)
for repo in repos:
if not repo.active:
continue
print u"Checking repo: {}\n{!r}".format(repo.slug, repo.description)
try:
build = conn.build(repo.last_build_id)
if "kitrun.py" in build.config.get("script", [""])[0]:
print "Found drift project: ", repo.slug
if not build.running:
print "Restarting..."
build.restart()
restarted.append(repo.slug)
else:
print "Build is already running!"
building.append(repo.slug)
else:
print "Not a drift based project."
except Exception as e:
print "Can't build repo: ", e
print ""
if restarted:
print "Repos restarted:"
for reponame in restarted:
print "\t", reponame
else:
print "No builds restarted."
if building:
print "Repos already building:"
for reponame in building:
print "\t", reponame
if __name__ == "__main__":
main()
|
Fix to Travis build script.
|
Fix to Travis build script.
|
Python
|
mit
|
dgnorth/drift,dgnorth/drift,dgnorth/drift
|
---
+++
@@ -19,7 +19,7 @@
for repo in repos:
if not repo.active:
continue
- print u"Checking repo: {}\n{}".format(repo.slug, repo.description)
+ print u"Checking repo: {}\n{!r}".format(repo.slug, repo.description)
try:
build = conn.build(repo.last_build_id)
if "kitrun.py" in build.config.get("script", [""])[0]:
|
08d83313d2e077e760adc9a4bfd7c181d9f8a60a
|
cal_pipe/update_pipeline_paths.py
|
cal_pipe/update_pipeline_paths.py
|
'''
Update EVLA pipeline variables to the current system.
'''
def update_paths(pipe_dict, ms_path, pipepath):
pipe_dict['ms_active'] = ms_path
pipe_dict['SDM_name'] = ms_path[:-3] # Cutoff '.ms'
pipe_dict['pipepath'] = pipepath
return pipe_dict
if __name__ == '__main__':
import sys
pipe_var_file = str(sys.argv[5])
ms_path = str(sys.argv[6])
pipepath = str(sys.argv[7])
import shelve
pipe_dict = shelve.open(pipe_var_file, writeback=True)
pipe_dict = update_paths(pipe_dict, ms_path, pipepath)
pipe_dict.sync()
pipe_dict.close()
|
'''
Update EVLA pipeline variables to the current system.
'''
def update_paths(pipe_dict, ms_path, pipepath):
pipe_dict['ms_active'] = ms_path
pipe_dict['SDM_name'] = ms_path[:-3] # Cutoff '.ms'
pipe_dict['pipepath'] = pipepath
return pipe_dict
if __name__ == '__main__':
import sys
pipe_var_file = str(sys.argv[5])
ms_path = str(sys.argv[6])
pipepath = str(sys.argv[7])
import shelve
pipe_dict = shelve.open(pipe_var_file, writeback=True)
pipe_dict = update_paths(pipe_dict, ms_path, pipepath)
pipe_dict.sync()
print "Checking!"
print pipe_dict['ms_active']
print pipe_dict['SDM_name']
print pipe_dict['pipepath']
pipe_dict.close()
|
Print out changed paths in pipeline file
|
Print out changed paths in pipeline file
|
Python
|
mit
|
e-koch/canfar_scripts,e-koch/canfar_scripts
|
---
+++
@@ -31,4 +31,9 @@
pipe_dict.sync()
+ print "Checking!"
+ print pipe_dict['ms_active']
+ print pipe_dict['SDM_name']
+ print pipe_dict['pipepath']
+
pipe_dict.close()
|
cc08888a527dac321f88cbe9da27508aee62e51e
|
examples/lab/main.py
|
examples/lab/main.py
|
"""
Copyright (c) Jupyter Development Team.
Distributed under the terms of the Modified BSD License.
"""
import os
from jinja2 import FileSystemLoader
from notebook.base.handlers import IPythonHandler, FileFindHandler
from notebook.notebookapp import NotebookApp
from traitlets import Unicode
class ExampleHandler(IPythonHandler):
"""Handle requests between the main app page and notebook server."""
def get(self):
"""Get the main page for the application's interface."""
return self.write(self.render_template("index.html",
static=self.static_url, base_url=self.base_url))
def get_template(self, name):
loader = FileSystemLoader(os.getcwd())
return loader.load(self.settings['jinja2_env'], name)
class ExampleApp(NotebookApp):
default_url = Unicode('/example')
def init_webapp(self):
"""initialize tornado webapp and httpserver.
"""
super(ExampleApp, self).init_webapp()
default_handlers = [
(r'/example/?', ExampleHandler),
(r"/example/(.*)", FileFindHandler,
{'path': 'build'}),
]
self.web_app.add_handlers(".*$", default_handlers)
if __name__ == '__main__':
ExampleApp.launch_instance()
|
"""
Copyright (c) Jupyter Development Team.
Distributed under the terms of the Modified BSD License.
"""
import os
from jinja2 import FileSystemLoader
from notebook.base.handlers import IPythonHandler, FileFindHandler
from notebook.notebookapp import NotebookApp
from traitlets import Unicode
class ExampleHandler(IPythonHandler):
"""Handle requests between the main app page and notebook server."""
def get(self):
"""Get the main page for the application's interface."""
return self.write(self.render_template("index.html",
static=self.static_url, base_url=self.base_url,
terminals_available=True))
def get_template(self, name):
loader = FileSystemLoader(os.getcwd())
return loader.load(self.settings['jinja2_env'], name)
class ExampleApp(NotebookApp):
default_url = Unicode('/example')
def init_webapp(self):
"""initialize tornado webapp and httpserver.
"""
super(ExampleApp, self).init_webapp()
default_handlers = [
(r'/example/?', ExampleHandler),
(r"/example/(.*)", FileFindHandler,
{'path': 'build'}),
]
self.web_app.add_handlers(".*$", default_handlers)
if __name__ == '__main__':
ExampleApp.launch_instance()
|
Enable terminals in the lab example
|
Enable terminals in the lab example
|
Python
|
bsd-3-clause
|
eskirk/jupyterlab,eskirk/jupyterlab,jupyter/jupyterlab,eskirk/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,charnpreetsingh185/jupyterlab,charnpreetsingh185/jupyterlab,charnpreetsingh185/jupyterlab,jupyter/jupyterlab,jupyter/jupyterlab,eskirk/jupyterlab,charnpreetsingh185/jupyterlab,eskirk/jupyterlab,charnpreetsingh185/jupyterlab
|
---
+++
@@ -16,7 +16,8 @@
def get(self):
"""Get the main page for the application's interface."""
return self.write(self.render_template("index.html",
- static=self.static_url, base_url=self.base_url))
+ static=self.static_url, base_url=self.base_url,
+ terminals_available=True))
def get_template(self, name):
loader = FileSystemLoader(os.getcwd())
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.