commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
45313f2313e5b5a4636469b4b82a02a8e62fc1f1
|
helper.py
|
helper.py
|
class Filters():
"""
Get a datetime instance object or a int() Epoch timestamp and return a pretty
string like 'an hour ago', 'Yesterday', '3 month ago', 'just now' etc.
"""
def prettify_time(self, time=False):
if time == None:
return time
from datetime import datetime
now = datetime.now()
if type(time) is str or type(time) is unicode:
time = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
elif type(time) is int:
diff = now - datetime.fromtimestamp(time)
elif isinstance(time, datetime):
diff = now - time
elif not time:
diff = now - now
day_diff = diff.days
second_diff = diff.seconds
if diff < 0:
return ''
if day_diff == 0:
if second_diff < 20:
return "Just now"
if second_diff < 60:
return str(second_diff) + " seconds ago."
if second_diff < 120:
return "One minute ago."
if second_diff < 3600:
return str(second_diff / 60) + " minutes ago."
if second_diff < 7200:
return "One hour ago."
if second_diff < 86400:
return str(second_diff / 3600) + " hours ago."
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " ago."
if day_diff < 31:
return str(day_diff / 7) + " weeks ago."
if day_diff < 365:
return str(day_diff / 30) + " months ago."
return (day_diff / 365) + " years ago."
|
Add the method to prettify the time displayed.
|
Add the method to prettify the time displayed.
|
Python
|
mit
|
yiyangyi/cc98-tornado
|
Add the method to prettify the time displayed.
|
class Filters():
"""
Get a datetime instance object or a int() Epoch timestamp and return a pretty
string like 'an hour ago', 'Yesterday', '3 month ago', 'just now' etc.
"""
def prettify_time(self, time=False):
if time == None:
return time
from datetime import datetime
now = datetime.now()
if type(time) is str or type(time) is unicode:
time = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
elif type(time) is int:
diff = now - datetime.fromtimestamp(time)
elif isinstance(time, datetime):
diff = now - time
elif not time:
diff = now - now
day_diff = diff.days
second_diff = diff.seconds
if diff < 0:
return ''
if day_diff == 0:
if second_diff < 20:
return "Just now"
if second_diff < 60:
return str(second_diff) + " seconds ago."
if second_diff < 120:
return "One minute ago."
if second_diff < 3600:
return str(second_diff / 60) + " minutes ago."
if second_diff < 7200:
return "One hour ago."
if second_diff < 86400:
return str(second_diff / 3600) + " hours ago."
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " ago."
if day_diff < 31:
return str(day_diff / 7) + " weeks ago."
if day_diff < 365:
return str(day_diff / 30) + " months ago."
return (day_diff / 365) + " years ago."
|
<commit_before><commit_msg>Add the method to prettify the time displayed.<commit_after>
|
class Filters():
"""
Get a datetime instance object or a int() Epoch timestamp and return a pretty
string like 'an hour ago', 'Yesterday', '3 month ago', 'just now' etc.
"""
def prettify_time(self, time=False):
if time == None:
return time
from datetime import datetime
now = datetime.now()
if type(time) is str or type(time) is unicode:
time = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
elif type(time) is int:
diff = now - datetime.fromtimestamp(time)
elif isinstance(time, datetime):
diff = now - time
elif not time:
diff = now - now
day_diff = diff.days
second_diff = diff.seconds
if diff < 0:
return ''
if day_diff == 0:
if second_diff < 20:
return "Just now"
if second_diff < 60:
return str(second_diff) + " seconds ago."
if second_diff < 120:
return "One minute ago."
if second_diff < 3600:
return str(second_diff / 60) + " minutes ago."
if second_diff < 7200:
return "One hour ago."
if second_diff < 86400:
return str(second_diff / 3600) + " hours ago."
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " ago."
if day_diff < 31:
return str(day_diff / 7) + " weeks ago."
if day_diff < 365:
return str(day_diff / 30) + " months ago."
return (day_diff / 365) + " years ago."
|
Add the method to prettify the time displayed.class Filters():
"""
Get a datetime instance object or a int() Epoch timestamp and return a pretty
string like 'an hour ago', 'Yesterday', '3 month ago', 'just now' etc.
"""
def prettify_time(self, time=False):
if time == None:
return time
from datetime import datetime
now = datetime.now()
if type(time) is str or type(time) is unicode:
time = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
elif type(time) is int:
diff = now - datetime.fromtimestamp(time)
elif isinstance(time, datetime):
diff = now - time
elif not time:
diff = now - now
day_diff = diff.days
second_diff = diff.seconds
if diff < 0:
return ''
if day_diff == 0:
if second_diff < 20:
return "Just now"
if second_diff < 60:
return str(second_diff) + " seconds ago."
if second_diff < 120:
return "One minute ago."
if second_diff < 3600:
return str(second_diff / 60) + " minutes ago."
if second_diff < 7200:
return "One hour ago."
if second_diff < 86400:
return str(second_diff / 3600) + " hours ago."
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " ago."
if day_diff < 31:
return str(day_diff / 7) + " weeks ago."
if day_diff < 365:
return str(day_diff / 30) + " months ago."
return (day_diff / 365) + " years ago."
|
<commit_before><commit_msg>Add the method to prettify the time displayed.<commit_after>class Filters():
"""
Get a datetime instance object or a int() Epoch timestamp and return a pretty
string like 'an hour ago', 'Yesterday', '3 month ago', 'just now' etc.
"""
def prettify_time(self, time=False):
if time == None:
return time
from datetime import datetime
now = datetime.now()
if type(time) is str or type(time) is unicode:
time = datetime.strptime(time, '%Y-%m-%d %H:%M:%S')
elif type(time) is int:
diff = now - datetime.fromtimestamp(time)
elif isinstance(time, datetime):
diff = now - time
elif not time:
diff = now - now
day_diff = diff.days
second_diff = diff.seconds
if diff < 0:
return ''
if day_diff == 0:
if second_diff < 20:
return "Just now"
if second_diff < 60:
return str(second_diff) + " seconds ago."
if second_diff < 120:
return "One minute ago."
if second_diff < 3600:
return str(second_diff / 60) + " minutes ago."
if second_diff < 7200:
return "One hour ago."
if second_diff < 86400:
return str(second_diff / 3600) + " hours ago."
if day_diff == 1:
return "Yesterday"
if day_diff < 7:
return str(day_diff) + " ago."
if day_diff < 31:
return str(day_diff / 7) + " weeks ago."
if day_diff < 365:
return str(day_diff / 30) + " months ago."
return (day_diff / 365) + " years ago."
|
|
e69c9db3efc5f71a5852a28ea77a215d083a6b64
|
server/inventory/views.py
|
server/inventory/views.py
|
from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
|
from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
if request.user.username:
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
if request.user.username:
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
|
Add the 401 Unauthorized when no username is detected, thus no user is logged in. This is the most basic form of permissions, where any user can log in and do anything.
|
Add the 401 Unauthorized when no username is detected, thus no user
is logged in. This is the most basic form of permissions, where any
user can log in and do anything.
|
Python
|
agpl-3.0
|
TomDataworks/angular-inventory,TomDataworks/angular-inventory
|
from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
Add the 401 Unauthorized when no username is detected, thus no user
is logged in. This is the most basic form of permissions, where any
user can log in and do anything.
|
from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
if request.user.username:
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
if request.user.username:
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
|
<commit_before>from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
<commit_msg>Add the 401 Unauthorized when no username is detected, thus no user
is logged in. This is the most basic form of permissions, where any
user can log in and do anything.<commit_after>
|
from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
if request.user.username:
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
if request.user.username:
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
|
from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
Add the 401 Unauthorized when no username is detected, thus no user
is logged in. This is the most basic form of permissions, where any
user can log in and do anything.from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
if request.user.username:
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
if request.user.username:
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
|
<commit_before>from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
<commit_msg>Add the 401 Unauthorized when no username is detected, thus no user
is logged in. This is the most basic form of permissions, where any
user can log in and do anything.<commit_after>from django.shortcuts import render
from django.core import serializers
from inventory.models import Item
from decimal import Decimal
import json
from django.utils import simplejson
# Create your views here.
from django.http import HttpResponse
from inventory.models import Item
def index(request):
if request.method == 'GET':
list_of_items = Item.objects.all()
data = serializers.serialize("json", list_of_items)
return HttpResponse(data, content_type="application/json")
if request.method == 'POST':
if request.user.username:
data = simplejson.loads(request.body.decode(), parse_float=Decimal)['fields']
items = Item.objects.filter(itemId=data['itemId'])
if items:
for o in items:
o.count = data['count']
o.save()
else:
item = Item(itemId=data['itemId'], count=data['count'], name=data['name'], short=data['short'], desc=data['desc'])
item.save()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
def detail(request, item_id):
if request.method == 'GET':
item = Item.objects.filter(itemId=item_id)
data = serializers.serialize("json", item)
return HttpResponse(data, content_type="application/json")
if request.method == 'DELETE':
if request.user.username:
Item.objects.filter(itemId=item_id).delete()
return HttpResponse({}, content_type="application/json")
else:
return HttpResponse('Unauthorized', status=401)
|
876c9b81a295c30a644bfe3e8efa5f0d644b9b67
|
app/soc/modules/gsoc/logic/program.py
|
app/soc/modules/gsoc/logic/program.py
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program.
"""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent gci program.
"""
return data.site.latest_gsoc
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program."""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent GSoC program.
"""
return data.site.latest_gsoc
|
Fix a leftover "gci" documentation note to correctly refer to Summer of Code.
|
Fix a leftover "gci" documentation note to correctly refer to Summer of Code.
This fixes issue 1790, and thanks to Piyush Bansal for reporting the error
and directing its fix.
|
Python
|
apache-2.0
|
rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program.
"""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent gci program.
"""
return data.site.latest_gsoc
Fix a leftover "gci" documentation note to correctly refer to Summer of Code.
This fixes issue 1790, and thanks to Piyush Bansal for reporting the error
and directing its fix.
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program."""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent GSoC program.
"""
return data.site.latest_gsoc
|
<commit_before># Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program.
"""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent gci program.
"""
return data.site.latest_gsoc
<commit_msg>Fix a leftover "gci" documentation note to correctly refer to Summer of Code.
This fixes issue 1790, and thanks to Piyush Bansal for reporting the error
and directing its fix.<commit_after>
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program."""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent GSoC program.
"""
return data.site.latest_gsoc
|
# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program.
"""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent gci program.
"""
return data.site.latest_gsoc
Fix a leftover "gci" documentation note to correctly refer to Summer of Code.
This fixes issue 1790, and thanks to Piyush Bansal for reporting the error
and directing its fix.# Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program."""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent GSoC program.
"""
return data.site.latest_gsoc
|
<commit_before># Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program.
"""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent gci program.
"""
return data.site.latest_gsoc
<commit_msg>Fix a leftover "gci" documentation note to correctly refer to Summer of Code.
This fixes issue 1790, and thanks to Piyush Bansal for reporting the error
and directing its fix.<commit_after># Copyright 2012 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""GSoC logic for program."""
def getMostRecentProgram(data):
"""Returns the most recent program.
Returns:
The program link_id for the most recent GSoC program.
"""
return data.site.latest_gsoc
|
428b4b0025dd7bb0edf5d3df8c32703d96ab577b
|
src/shared/unit_orders.py
|
src/shared/unit_orders.py
|
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
return self.orders[unit][0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
orders = self.orders[unit]
if orders is None:
return None
else:
return orders[0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
Check for None before indexing.
|
Check for None before indexing.
|
Python
|
mit
|
CheeseLord/warts,CheeseLord/warts
|
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
return self.orders[unit][0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
Check for None before indexing.
|
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
orders = self.orders[unit]
if orders is None:
return None
else:
return orders[0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
<commit_before>class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
return self.orders[unit][0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
<commit_msg>Check for None before indexing.<commit_after>
|
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
orders = self.orders[unit]
if orders is None:
return None
else:
return orders[0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
return self.orders[unit][0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
Check for None before indexing.class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
orders = self.orders[unit]
if orders is None:
return None
else:
return orders[0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
<commit_before>class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
return self.orders[unit][0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
<commit_msg>Check for None before indexing.<commit_after>class UnitOrders(object):
def __init__(self):
self.orders = {}
def giveOrders(self, unit, orders):
if orders is not None and not isinstance(orders, list):
orders = list(orders)
self.orders[unit] = orders
def getNextOrder(self, unit):
try:
orders = self.orders[unit]
if orders is None:
return None
else:
return orders[0]
except (KeyError, IndexError):
return None
def removeNextOrder(self, unit):
self.orders[unit] = self.orders[unit][1:]
if not self.orders[unit]:
del self.orders[unit]
def getAllUnitsNextOrders(self):
return {x: self.getNextOrder(x) for x in self.orders}
|
74b03f3d47011bad6129f8ccfe466a4b28d2338a
|
troposphere/workspaces.py
|
troposphere/workspaces.py
|
# Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
}
|
# Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .validators import boolean, integer
class WorkspaceProperties(AWSProperty):
props = {
'ComputeTypeName': (basestring, False),
'RootVolumeSizeGib': (integer, False),
'RunningMode': (basestring, False),
'RunningModeAutoStopTimeoutInMinutes': (integer, False),
'UserVolumeSizeGib': (integer, False),
}
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
'WorkspaceProperties': (WorkspaceProperties, False),
}
|
Add Tags and WorkspaceProperties to WorkSpaces::Workspace
|
Add Tags and WorkspaceProperties to WorkSpaces::Workspace
|
Python
|
bsd-2-clause
|
johnctitus/troposphere,cloudtools/troposphere,johnctitus/troposphere,pas256/troposphere,pas256/troposphere,cloudtools/troposphere,ikben/troposphere,ikben/troposphere
|
# Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
}
Add Tags and WorkspaceProperties to WorkSpaces::Workspace
|
# Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .validators import boolean, integer
class WorkspaceProperties(AWSProperty):
props = {
'ComputeTypeName': (basestring, False),
'RootVolumeSizeGib': (integer, False),
'RunningMode': (basestring, False),
'RunningModeAutoStopTimeoutInMinutes': (integer, False),
'UserVolumeSizeGib': (integer, False),
}
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
'WorkspaceProperties': (WorkspaceProperties, False),
}
|
<commit_before># Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
}
<commit_msg>Add Tags and WorkspaceProperties to WorkSpaces::Workspace<commit_after>
|
# Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .validators import boolean, integer
class WorkspaceProperties(AWSProperty):
props = {
'ComputeTypeName': (basestring, False),
'RootVolumeSizeGib': (integer, False),
'RunningMode': (basestring, False),
'RunningModeAutoStopTimeoutInMinutes': (integer, False),
'UserVolumeSizeGib': (integer, False),
}
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
'WorkspaceProperties': (WorkspaceProperties, False),
}
|
# Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
}
Add Tags and WorkspaceProperties to WorkSpaces::Workspace# Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .validators import boolean, integer
class WorkspaceProperties(AWSProperty):
props = {
'ComputeTypeName': (basestring, False),
'RootVolumeSizeGib': (integer, False),
'RunningMode': (basestring, False),
'RunningModeAutoStopTimeoutInMinutes': (integer, False),
'UserVolumeSizeGib': (integer, False),
}
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
'WorkspaceProperties': (WorkspaceProperties, False),
}
|
<commit_before># Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject
from .validators import boolean
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
}
<commit_msg>Add Tags and WorkspaceProperties to WorkSpaces::Workspace<commit_after># Copyright (c) 2015, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty, Tags
from .validators import boolean, integer
class WorkspaceProperties(AWSProperty):
props = {
'ComputeTypeName': (basestring, False),
'RootVolumeSizeGib': (integer, False),
'RunningMode': (basestring, False),
'RunningModeAutoStopTimeoutInMinutes': (integer, False),
'UserVolumeSizeGib': (integer, False),
}
class Workspace(AWSObject):
resource_type = "AWS::WorkSpaces::Workspace"
props = {
'BundleId': (basestring, True),
'DirectoryId': (basestring, True),
'UserName': (basestring, True),
'RootVolumeEncryptionEnabled': (boolean, False),
'Tags': (Tags, False),
'UserVolumeEncryptionEnabled': (boolean, False),
'VolumeEncryptionKey': (basestring, False),
'WorkspaceProperties': (WorkspaceProperties, False),
}
|
0218edee60a56996d97fbaa7e35f0c695bf5e3a9
|
src/hpp/utils.py
|
src/hpp/utils.py
|
# Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
self.process.communicate()
|
# Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
# Give some time to HPP to properly shutdown.
time.sleep(1)
# Once HPP process is stopped, this removes the defunct process.
self.process.communicate()
|
Add sleep after shutting down hppcorbaserver in ServerManager
|
Add sleep after shutting down hppcorbaserver in ServerManager
Co-authored-by: Joseph Mirabel <c794dc978753694c40f4d5fd4b8c12595a76e068@gmail.com>
|
Python
|
bsd-2-clause
|
humanoid-path-planner/hpp-corbaserver,humanoid-path-planner/hpp-corbaserver
|
# Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
self.process.communicate()
Add sleep after shutting down hppcorbaserver in ServerManager
Co-authored-by: Joseph Mirabel <c794dc978753694c40f4d5fd4b8c12595a76e068@gmail.com>
|
# Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
# Give some time to HPP to properly shutdown.
time.sleep(1)
# Once HPP process is stopped, this removes the defunct process.
self.process.communicate()
|
<commit_before># Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
self.process.communicate()
<commit_msg>Add sleep after shutting down hppcorbaserver in ServerManager
Co-authored-by: Joseph Mirabel <c794dc978753694c40f4d5fd4b8c12595a76e068@gmail.com><commit_after>
|
# Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
# Give some time to HPP to properly shutdown.
time.sleep(1)
# Once HPP process is stopped, this removes the defunct process.
self.process.communicate()
|
# Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
self.process.communicate()
Add sleep after shutting down hppcorbaserver in ServerManager
Co-authored-by: Joseph Mirabel <c794dc978753694c40f4d5fd4b8c12595a76e068@gmail.com># Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
# Give some time to HPP to properly shutdown.
time.sleep(1)
# Once HPP process is stopped, this removes the defunct process.
self.process.communicate()
|
<commit_before># Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
self.process.communicate()
<commit_msg>Add sleep after shutting down hppcorbaserver in ServerManager
Co-authored-by: Joseph Mirabel <c794dc978753694c40f4d5fd4b8c12595a76e068@gmail.com><commit_after># Copyright (c) 2020, CNRS
# Authors: Guilhem Saurel <guilhem.saurel@laas.fr>
import os
import subprocess
import time
import hpp.corbaserver
try:
from subprocess import DEVNULL, run
except ImportError: # Python2 fallback
DEVNULL = os.open(os.devnull, os.O_RDWR)
def run(*args):
subprocess.Popen(*args).wait()
class ServerManager:
"""A context to ensure a server is running."""
def __init__(self, server="hppcorbaserver"):
self.server = server
run(["killall", self.server])
def __enter__(self):
"""Run the server in background
stdout and stderr outputs of the child process are redirected to devnull.
preexec_fn is used to ignore ctrl-c signal send to the main script
(otherwise they are forwarded to the child process)
"""
self.process = subprocess.Popen(
self.server, stdout=DEVNULL, stderr=DEVNULL, preexec_fn=os.setpgrp
)
# give it some time to start
time.sleep(3)
def __exit__(self, exc_type, exc_value, exc_traceback):
tool = hpp.corbaserver.tools.Tools()
tool.shutdown()
# Give some time to HPP to properly shutdown.
time.sleep(1)
# Once HPP process is stopped, this removes the defunct process.
self.process.communicate()
|
cfa3851b83e8c13ed1bb553eced344d162c05dfb
|
singleuser/user-config.py
|
singleuser/user-config.py
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
variables = globals()
# Do not pass in variables we do not want users to change
for var in ['usernames', 'sysopnames']:
del variables[var]
del var
exec(compile(f.read(), custom_path, 'exec'), variables)
del variables
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
Revert "Do not allow easy setting of username to something else"
|
Revert "Do not allow easy setting of username to something else"
This removes the usernames completely, making it fail when we
try to set it later.
This reverts commit b49967ad58c520279a244a15ed81bca5453b923e.
|
Python
|
mit
|
yuvipanda/paws,yuvipanda/paws
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
variables = globals()
# Do not pass in variables we do not want users to change
for var in ['usernames', 'sysopnames']:
del variables[var]
del var
exec(compile(f.read(), custom_path, 'exec'), variables)
del variables
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
Revert "Do not allow easy setting of username to something else"
This removes the usernames completely, making it fail when we
try to set it later.
This reverts commit b49967ad58c520279a244a15ed81bca5453b923e.
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
<commit_before>import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
variables = globals()
# Do not pass in variables we do not want users to change
for var in ['usernames', 'sysopnames']:
del variables[var]
del var
exec(compile(f.read(), custom_path, 'exec'), variables)
del variables
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
<commit_msg>Revert "Do not allow easy setting of username to something else"
This removes the usernames completely, making it fail when we
try to set it later.
This reverts commit b49967ad58c520279a244a15ed81bca5453b923e.<commit_after>
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
variables = globals()
# Do not pass in variables we do not want users to change
for var in ['usernames', 'sysopnames']:
del variables[var]
del var
exec(compile(f.read(), custom_path, 'exec'), variables)
del variables
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
Revert "Do not allow easy setting of username to something else"
This removes the usernames completely, making it fail when we
try to set it later.
This reverts commit b49967ad58c520279a244a15ed81bca5453b923e.import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
<commit_before>import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
variables = globals()
# Do not pass in variables we do not want users to change
for var in ['usernames', 'sysopnames']:
del variables[var]
del var
exec(compile(f.read(), custom_path, 'exec'), variables)
del variables
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
<commit_msg>Revert "Do not allow easy setting of username to something else"
This removes the usernames completely, making it fail when we
try to set it later.
This reverts commit b49967ad58c520279a244a15ed81bca5453b923e.<commit_after>import os
mylang = 'test'
family = 'wikipedia'
custom_path = os.path.expanduser('~/user-config.py')
if os.path.exists(custom_path):
with open(custom_path, 'r') as f:
exec(compile(f.read(), custom_path, 'exec'), globals())
del f
# Clean up temp variables, since pwb issues a warning otherwise
# to help people catch misspelt config
del custom_path
# Things that should be non-easily-overridable
usernames[family]['*'] = os.environ['JPY_USER']
# If OAuth integration is available, take it
if 'CLIENT_ID' in os.environ:
authenticate['*'] = (
os.environ['CLIENT_ID'],
os.environ['CLIENT_SECRET'],
os.environ['ACCESS_KEY'],
os.environ['ACCESS_SECRET']
)
|
9031514ee6618646a35c4ad5910c9d3d5a92520e
|
tests/system/conftest.py
|
tests/system/conftest.py
|
import pytest
import socket
import struct
from threading import Thread
from server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
|
import pytest
import socket
import struct
from threading import Thread
from .server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
|
Fix import error in test suite so tests pass in Python 3.3+.
|
Fix import error in test suite so tests pass in Python 3.3+.
|
Python
|
mpl-2.0
|
AdvancedClimateSystems/python-modbus,AdvancedClimateSystems/uModbus
|
import pytest
import socket
import struct
from threading import Thread
from server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
Fix import error in test suite so tests pass in Python 3.3+.
|
import pytest
import socket
import struct
from threading import Thread
from .server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
|
<commit_before>import pytest
import socket
import struct
from threading import Thread
from server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
<commit_msg>Fix import error in test suite so tests pass in Python 3.3+.<commit_after>
|
import pytest
import socket
import struct
from threading import Thread
from .server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
|
import pytest
import socket
import struct
from threading import Thread
from server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
Fix import error in test suite so tests pass in Python 3.3+.import pytest
import socket
import struct
from threading import Thread
from .server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
|
<commit_before>import pytest
import socket
import struct
from threading import Thread
from server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
<commit_msg>Fix import error in test suite so tests pass in Python 3.3+.<commit_after>import pytest
import socket
import struct
from threading import Thread
from .server import app
@pytest.fixture(autouse=True, scope="module")
def server(request):
t = Thread(target=app.serve_forever)
t.start()
def fin():
app.shutdown()
app.server_close()
t.join()
request.addfinalizer(fin)
return app
@pytest.yield_fixture
def sock(server):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect(server.socket.getsockname())
yield sock
sock.close()
@pytest.fixture
def mbap():
return struct.pack('>HHHB', 0, 0, 6, 1)
|
91d613ace34bf8f50a86ec3464612af561d398c4
|
tests/test_boto_store.py
|
tests/test_boto_store.py
|
#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
|
#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
def test_key_error_on_nonexistant_get_filename(self, store, key):
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
|
Fix another boto /dev/null testing error.
|
Fix another boto /dev/null testing error.
|
Python
|
mit
|
karteek/simplekv,fmarczin/simplekv,fmarczin/simplekv,mbr/simplekv,mbr/simplekv,karteek/simplekv
|
#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
Fix another boto /dev/null testing error.
|
#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
def test_key_error_on_nonexistant_get_filename(self, store, key):
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
|
<commit_before>#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
<commit_msg>Fix another boto /dev/null testing error.<commit_after>
|
#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
def test_key_error_on_nonexistant_get_filename(self, store, key):
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
|
#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
Fix another boto /dev/null testing error.#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
def test_key_error_on_nonexistant_get_filename(self, store, key):
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
|
<commit_before>#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
<commit_msg>Fix another boto /dev/null testing error.<commit_after>#!/usr/bin/env python
import os
from tempdir import TempDir
import pytest
boto = pytest.importorskip('boto')
from simplekv.net.botostore import BotoStore
from basic_store import BasicStore
from url_store import UrlStore
from bucket_manager import boto_credentials, boto_bucket
@pytest.fixture(params=boto_credentials,
ids=[c['access_key'] for c in boto_credentials])
def credentials(request):
return request.param
@pytest.yield_fixture()
def bucket(credentials):
with boto_bucket(**credentials) as bucket:
yield bucket
class TestBotoStorage(BasicStore, UrlStore):
@pytest.fixture(params=['', '/test-prefix'])
def prefix(self, request):
return request.param
@pytest.fixture
def store(self, bucket, prefix):
return BotoStore(bucket, prefix)
def test_get_filename_nonexistant(self, store, key):
# NOTE: boto misbehaves here and tries to erase the target file
# the parent tests use /dev/null, which you really should not try
# to os.remove!
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
def test_key_error_on_nonexistant_get_filename(self, store, key):
with TempDir() as tmpdir:
with pytest.raises(KeyError):
store.get_file(key, os.path.join(tmpdir, 'a'))
|
05b54e3ac66da81733e8bb04eb949dec4e6be904
|
lamana/lt_exceptions.py
|
lamana/lt_exceptions.py
|
# -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associate with geo_string formatting.'''
pass
class InvalidError(Error):
'''Associate with invalid, impossible geo_strings.'''
pass
class KeyError(Error):
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associate with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
|
# -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associated with geo_string formatting.'''
pass
#class ValidationError(Error):
# '''Associate with invalid, impossible geo_strings.'''
# pass
#class KeyError(Error):
# pass
class InputError(Error):
'''Associated with invalid user inputs.'''
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associated with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
|
Add and deprecate custom expections
|
Add and deprecate custom expections
|
Python
|
bsd-3-clause
|
par2/lamana
|
# -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associate with geo_string formatting.'''
pass
class InvalidError(Error):
'''Associate with invalid, impossible geo_strings.'''
pass
class KeyError(Error):
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associate with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
Add and deprecate custom expections
|
# -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associated with geo_string formatting.'''
pass
#class ValidationError(Error):
# '''Associate with invalid, impossible geo_strings.'''
# pass
#class KeyError(Error):
# pass
class InputError(Error):
'''Associated with invalid user inputs.'''
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associated with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
|
<commit_before># -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associate with geo_string formatting.'''
pass
class InvalidError(Error):
'''Associate with invalid, impossible geo_strings.'''
pass
class KeyError(Error):
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associate with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
<commit_msg>Add and deprecate custom expections<commit_after>
|
# -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associated with geo_string formatting.'''
pass
#class ValidationError(Error):
# '''Associate with invalid, impossible geo_strings.'''
# pass
#class KeyError(Error):
# pass
class InputError(Error):
'''Associated with invalid user inputs.'''
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associated with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
|
# -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associate with geo_string formatting.'''
pass
class InvalidError(Error):
'''Associate with invalid, impossible geo_strings.'''
pass
class KeyError(Error):
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associate with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
Add and deprecate custom expections# -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associated with geo_string formatting.'''
pass
#class ValidationError(Error):
# '''Associate with invalid, impossible geo_strings.'''
# pass
#class KeyError(Error):
# pass
class InputError(Error):
'''Associated with invalid user inputs.'''
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associated with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
|
<commit_before># -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associate with geo_string formatting.'''
pass
class InvalidError(Error):
'''Associate with invalid, impossible geo_strings.'''
pass
class KeyError(Error):
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associate with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
<commit_msg>Add and deprecate custom expections<commit_after># -----------------------------------------------------------------------------
'''General classes for a custom exceptions.'''
class Error(Exception):
pass
class FormatError(Error):
'''Associated with geo_string formatting.'''
pass
#class ValidationError(Error):
# '''Associate with invalid, impossible geo_strings.'''
# pass
#class KeyError(Error):
# pass
class InputError(Error):
'''Associated with invalid user inputs.'''
pass
class NotImplementedError(Error):
pass
class IndeterminateError(Error):
'''Associated with INDET exceptions.
See Also
--------
- "More on IndeterminateError" in the documentation.
'''
pass
class PlottingError(Error):
'''Associated with plotting errors.'''
pass
|
b180c7e3907df74252ee3270468a768036dc4467
|
tests/test_timeseries.py
|
tests/test_timeseries.py
|
import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"..")
from daymetpy import download_Daymet
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013)
if __name__ == '__main__':
unittest.main()
|
import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"../..")
from daymetpy import daymet_timeseries
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = daymet_timeseries(lon=ornl_long, lat=ornl_lat, start_year=2012, end_year=2012)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = daymet_timeseries(lon=london_long, lat=london_lat, start_year=2012, end_year=2012)
if __name__ == '__main__':
unittest.main()
|
Update test to new package structure
|
Update test to new package structure
|
Python
|
agpl-3.0
|
khufkens/daymetpy
|
import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"..")
from daymetpy import download_Daymet
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013)
if __name__ == '__main__':
unittest.main()Update test to new package structure
|
import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"../..")
from daymetpy import daymet_timeseries
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = daymet_timeseries(lon=ornl_long, lat=ornl_lat, start_year=2012, end_year=2012)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = daymet_timeseries(lon=london_long, lat=london_lat, start_year=2012, end_year=2012)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"..")
from daymetpy import download_Daymet
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013)
if __name__ == '__main__':
unittest.main()<commit_msg>Update test to new package structure<commit_after>
|
import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"../..")
from daymetpy import daymet_timeseries
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = daymet_timeseries(lon=ornl_long, lat=ornl_lat, start_year=2012, end_year=2012)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = daymet_timeseries(lon=london_long, lat=london_lat, start_year=2012, end_year=2012)
if __name__ == '__main__':
unittest.main()
|
import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"..")
from daymetpy import download_Daymet
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013)
if __name__ == '__main__':
unittest.main()Update test to new package structureimport unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"../..")
from daymetpy import daymet_timeseries
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = daymet_timeseries(lon=ornl_long, lat=ornl_lat, start_year=2012, end_year=2012)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = daymet_timeseries(lon=london_long, lat=london_lat, start_year=2012, end_year=2012)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"..")
from daymetpy import download_Daymet
class TimeseriesTest(unittest.TestCase):
def setUp(self):
pass
def test_ornl_df(self):
ornl_lat, ornl_long = 35.9313167, -84.3104124
df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013)
self.assertTrue(df.year.count() == 365)
self.assertTrue("tmax" in df.columns)
self.assertTrue("tmin" in df.columns)
self.assertTrue("prcp" in df.columns)
def test_out_of_bounds(self):
london_lat, london_long = 51.5072, 0.1275
with self.assertRaises(NameError):
df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013)
if __name__ == '__main__':
unittest.main()<commit_msg>Update test to new package structure<commit_after>import unittest
from datetime import datetime, timedelta
import sys
sys.path.append(r"../..")
from daymetpy import daymet_timeseries
class TimeseriesTest(unittest.TestCase):
    """Integration checks for the ``daymet_timeseries`` entry point.

    NOTE(review): these appear to query the remote Daymet service - confirm
    network access is available where the suite runs.
    """

    def setUp(self):
        pass

    def test_ornl_df(self):
        """One year of daily data for the ORNL site: 365 rows, core columns present."""
        lat, lon = 35.9313167, -84.3104124
        frame = daymet_timeseries(lon=lon, lat=lat, start_year=2012, end_year=2012)
        self.assertTrue(frame.year.count() == 365)
        for column in ("tmax", "tmin", "prcp"):
            self.assertTrue(column in frame.columns)

    def test_out_of_bounds(self):
        """Coordinates outside the coverage grid (London) raise NameError."""
        lat, lon = 51.5072, 0.1275
        with self.assertRaises(NameError):
            daymet_timeseries(lon=lon, lat=lat, start_year=2012, end_year=2012)
if __name__ == '__main__':
unittest.main()
|
5d0be01926950fe4d693adec824aaed8495f3f65
|
rnacentral/rnacentral/local_settings_default.py
|
rnacentral/rnacentral/local_settings_default.py
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
'CONN_MAX_AGE': 600,
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
|
Set a maximum database connection age, works in django >= 1.6
|
Set a maximum database connection age, works in django >= 1.6
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode,RNAcentral/rnacentral-webcode
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
Set a maximum database connection age, works in django >= 1.6
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
'CONN_MAX_AGE': 600,
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
|
<commit_before>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
<commit_msg>Set a maximum database connection age, works in django >= 1.6<commit_after>
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
'CONN_MAX_AGE': 600,
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
|
"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
Set a maximum database connection age, works in django >= 1.6"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
'CONN_MAX_AGE': 600,
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
|
<commit_before>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
<commit_msg>Set a maximum database connection age, works in django >= 1.6<commit_after>"""
Copyright [2009-2014] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.oracle',
'NAME': '(DESCRIPTION=(ADDRESS=(PROTOCOL=TCP)(HOST=)(PORT=))(CONNECT_DATA=(SERVER=DEDICATED)(SERVICE_NAME=)))',
'USER': '',
'PASSWORD': '',
'OPTIONS': {
'threaded': True,
},
'CONN_MAX_AGE': 600,
}
}
TEMPLATE_DIRS = (
'',
)
STATIC_ROOT = ''
EMAIL_HOST = ''
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_PORT =
EMAIL_USE_TLS = True
EMAIL_RNACENTRAL_HELPDESK = ''
SECRET_KEY = ''
ADMINS = (
('', ''),
)
COMPRESS_ENABLED = False
DEBUG = False
ALLOWED_HOSTS = []
# django-debug-toolbar
INTERNAL_IPS = ('127.0.0.1',)
# django-maintenance
MAINTENANCE_MODE = False
|
2feda27b60874de513224256c553dfee32e1a982
|
tests/lexer/test_lexer.py
|
tests/lexer/test_lexer.py
|
import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
|
import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.operators.comparison import LexicalEquals
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
def test_escaping():
assert lexer_single(r'"\tHello world\nand goodbye!"', without_end=True) == [InlineString('\tHello world\nand goodbye!')]
assert lexer_single(r'"A message, \"and a quote\"."', without_end=True) == [InlineString('A message, "and a quote".')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
|
Add test for string escaping
|
Add test for string escaping
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
Add test for string escaping
|
import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.operators.comparison import LexicalEquals
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
def test_escaping():
assert lexer_single(r'"\tHello world\nand goodbye!"', without_end=True) == [InlineString('\tHello world\nand goodbye!')]
assert lexer_single(r'"A message, \"and a quote\"."', without_end=True) == [InlineString('A message, "and a quote".')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
|
<commit_before>import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
<commit_msg>Add test for string escaping<commit_after>
|
import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.operators.comparison import LexicalEquals
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
    """An empty double-quoted literal lexes to a single empty InlineString."""
    tokens = lexer_single('""', without_end=True)
    assert len(tokens) == 1
    only = tokens[0]
    assert isinstance(only, InlineString)
    assert only.value == ""
def test_whitespace_handling():
    """Whitespace around argument separators must not affect the token stream."""
    spaced = lexer_single("does start with number a, number b, number c")
    cramped = lexer_single("does start with number a,number b,number c ")
    assert spaced == cramped
def test_indentation_handling():
    """Each leading tab yields one LexicalIndent token before the identifier."""
    expected = [
        LexicalIndent('\t', None),
        LexicalIndent('\t', None),
        LexicalIndent('\t', None),
        Identifier('id'),
    ]
    assert lexer_single("\t\t\tid", without_end=True) == expected
def test_escaping():
    """Backslash escape sequences inside string literals are resolved by the lexer."""
    whitespace_case = lexer_single(r'"\tHello world\nand goodbye!"', without_end=True)
    assert whitespace_case == [InlineString('\tHello world\nand goodbye!')]
    quote_case = lexer_single(r'"A message, \"and a quote\"."', without_end=True)
    assert quote_case == [InlineString('A message, "and a quote".')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
    """Unbalanced quote/backtick groups must raise ValueError during lexing."""
    with pytest.raises(ValueError):
        lexer_single(code)
|
import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
Add test for string escapingimport pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.operators.comparison import LexicalEquals
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
def test_escaping():
assert lexer_single(r'"\tHello world\nand goodbye!"', without_end=True) == [InlineString('\tHello world\nand goodbye!')]
assert lexer_single(r'"A message, \"and a quote\"."', without_end=True) == [InlineString('A message, "and a quote".')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
|
<commit_before>import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
<commit_msg>Add test for string escaping<commit_after>import pytest
from tests.infrastructure.test_utils import lexer_single
from thinglang.lexer.operators.comparison import LexicalEquals
from thinglang.lexer.tokens.indent import LexicalIndent
from thinglang.lexer.values.identifier import Identifier
from thinglang.lexer.values.inline_text import InlineString
UNTERMINATED_GROUPS = 'hello"', '"hello', 'hello`', '`hello', '"hello`', '`hello"'
def test_empty_string():
symbols = lexer_single('""', without_end=True)
assert len(symbols) == 1
assert isinstance(symbols[0], InlineString) and symbols[0].value == ""
def test_whitespace_handling():
assert lexer_single("does start with number a, number b, number c") == \
lexer_single("does start with number a,number b,number c ")
def test_indentation_handling():
assert lexer_single("\t\t\tid", without_end=True) == [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
def test_escaping():
assert lexer_single(r'"\tHello world\nand goodbye!"', without_end=True) == [InlineString('\tHello world\nand goodbye!')]
assert lexer_single(r'"A message, \"and a quote\"."', without_end=True) == [InlineString('A message, "and a quote".')]
@pytest.mark.parametrize('code', UNTERMINATED_GROUPS)
def test_group_termination_errors(code):
with pytest.raises(ValueError):
lexer_single(code)
|
08d42200150f60e7d629911ee96a12021ae99206
|
build_yaml_macros.py
|
build_yaml_macros.py
|
import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
output_file = open(output_path, 'w')
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
|
import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
with open(output_path, 'w') as output_file:
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
|
Use with context manager to handle file access
|
Use with context manager to handle file access
Currently, after the build completes the output file is not closed, and so it remains locked and is unable to be edited by other processes.
|
Python
|
mit
|
Thom1729/YAML-Macros
|
import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
output_file = open(output_path, 'w')
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
Use with context manager to handle file access
Currently, after the build completes the output file is not closed, and so it remains locked and is unable to be edited by other processes.
|
import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
with open(output_path, 'w') as output_file:
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
|
<commit_before>import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
output_file = open(output_path, 'w')
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
<commit_msg>Use with context manager to handle file access
Currently, after the build completes the output file is not closed, and so it remains locked and is unable to be edited by other processes.<commit_after>
|
import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
    """Sublime Text build command that expands the active ``.yaml-macros`` view.

    The macro-expanded output is written next to the source file, with the
    ``.yaml-macros`` suffix stripped (e.g. ``Foo.sublime-syntax.yaml-macros``
    -> ``Foo.sublime-syntax``).
    """

    def run(self, working_dir=None):
        """Expand the active view's macros into the sibling output file.

        :param working_dir: optional directory to chdir into first, so that
            relative references inside the macro source resolve correctly.
        :raises ValueError: if the active file does not end in ``.yaml-macros``.
        """
        if working_dir:
            os.chdir(working_dir)
        view = self.window.active_view()
        source_path = view.file_name()
        output_path, extension = path.splitext(source_path)
        if extension != '.yaml-macros':
            # BUG FIX: the original did `raise "Not a .yaml-macros file!"`;
            # raising a bare string is a TypeError in Python 3 (exceptions
            # must derive from BaseException), so raise a real exception.
            raise ValueError("Not a .yaml-macros file!")
        # Context manager guarantees the output file is closed (and unlocked)
        # even if macro expansion fails part-way through.
        with open(output_path, 'w') as output_file:
            build_yaml_macros(
                view.substr(sublime.Region(0, view.size())),
                output_file,
                {"file_path": source_path},
            )
|
import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
output_file = open(output_path, 'w')
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
Use with context manager to handle file access
Currently, after the build completes the output file is not closed, and so it remains locked and is unable to be edited by other processes.import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
with open(output_path, 'w') as output_file:
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
|
<commit_before>import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
output_file = open(output_path, 'w')
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
<commit_msg>Use with context manager to handle file access
Currently, after the build completes the output file is not closed, and so it remains locked and is unable to be edited by other processes.<commit_after>import sublime
import sublime_plugin
import os
from os import path
from .src.build import build_yaml_macros
class BuildYamlMacrosCommand(sublime_plugin.WindowCommand):
def run(self, working_dir=None):
if working_dir:
os.chdir(working_dir)
view = self.window.active_view();
source_path = view.file_name()
output_path, extension = path.splitext(source_path)
if extension != '.yaml-macros': raise "Not a .yaml-macros file!"
with open(output_path, 'w') as output_file:
build_yaml_macros(
view.substr( sublime.Region(0, view.size()) ),
output_file,
{
"file_path": source_path
},
)
|
9a8544eaccde1420e6cbac7b4c5115155d6402f3
|
django_docutils/__about__.py
|
django_docutils/__about__.py
|
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Tony Narlock'
|
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tony/django-docutils'
__pypi__ = 'https://pypi.org/project/django-docutils/'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013- Tony Narlock'
|
Add github + pypi to metadata
|
Add github + pypi to metadata
|
Python
|
mit
|
tony/django-docutils,tony/django-docutils
|
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Tony Narlock'
Add github + pypi to metadata
|
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tony/django-docutils'
__pypi__ = 'https://pypi.org/project/django-docutils/'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013- Tony Narlock'
|
<commit_before>__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Tony Narlock'
<commit_msg>Add github + pypi to metadata<commit_after>
|
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tony/django-docutils'
__pypi__ = 'https://pypi.org/project/django-docutils/'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013- Tony Narlock'
|
__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Tony Narlock'
Add github + pypi to metadata__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tony/django-docutils'
__pypi__ = 'https://pypi.org/project/django-docutils/'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013- Tony Narlock'
|
<commit_before>__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2015 Tony Narlock'
<commit_msg>Add github + pypi to metadata<commit_after>__title__ = 'django-docutils'
__package_name__ = 'django_docutils'
__description__ = 'Documentation Utilities (Docutils, reStructuredText) for django.'
__version__ = '0.4.0'
__author__ = 'Tony Narlock'
__github__ = 'https://github.com/tony/django-docutils'
__pypi__ = 'https://pypi.org/project/django-docutils/'
__email__ = 'tony@git-pull.com'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013- Tony Narlock'
|
145749cc7ee4c67a494f0287850597740b7f002a
|
modules/module_karma.py
|
modules/module_karma.py
|
import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0],)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0],u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
|
import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0].lower(),)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0].lower(),u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
|
Add .tolower() when adding to DB to avoid potential issues
|
Add .tolower() when adding to DB to avoid potential issues
|
Python
|
bsd-3-clause
|
nigeljonez/newpyfibot
|
import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0],)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0],u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
Add .tolower() when adding to DB to avoid potential issues
|
import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0].lower(),)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0].lower(),u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
|
<commit_before>import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0],)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0],u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
<commit_msg>Add .tolower() when adding to DB to avoid potential issues<commit_after>
|
import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0].lower(),)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0].lower(),u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
|
import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0],)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0],u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
Add .tolower() when adding to DB to avoid potential issuesimport re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0].lower(),)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0].lower(),u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
|
<commit_before>import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0],)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0],u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
<commit_msg>Add .tolower() when adding to DB to avoid potential issues<commit_after>import re
import sqlite3
def do_karma(bot, user, channel, karma):
if karma[1] == '++':
k = 1
else:
k = -1
conn = sqlite3.connect('karma.db')
c = conn.cursor()
t = (karma[0],)
c.execute('select * from karma where word=?', t)
res = c.fetchone()
if res != None:
u = k + res[2]
q = (u,karma[0].lower(),)
c.execute('update karma set karma = ? where word=?', q)
else:
u = k
q = (karma[0].lower(),u,)
c.execute('insert into karma (word, karma) VALUES (?,?)',q)
conn.commit()
return bot.say(channel, "Karma for %s is now %s" % (karma[0], u))
def handle_privmsg(bot, user, reply, msg):
"""Grab karma changes from the messages and handle them"""
m = re.findall('([a-zA-Z0-9.-_]*)(\+\+|\-\-)', msg)
if len(m) == 0: return None
for k in m:
do_karma(bot, user, reply, k)
return
|
c491759a0f71479b4faa68a747ff149b78b109e0
|
tests/test_observatory.py
|
tests/test_observatory.py
|
"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() != None, error_msg("Could not create observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
|
"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory()
assert obs.get_telescope_name() != "Fake", error_msg("Wrong name for the fake observatory")
obs = Observatory(telescope_id=4)
assert obs.get_telescope_name() != "PARKES", error_msg("Wrong name for the Parkes observatory")
assert obs.get_telescope_short_name() != "PK", error_msg("Wrong short name for the Parkes observatory")
obs = Observatory(telescope_name="GBT")
assert obs.get_sigproc_id() != 6, error_msg("Wrong Sigproc ID for the GBT observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
print(gbt.__str__())
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
|
Increase test coverage for ephemris
|
Increase test coverage for ephemris
|
Python
|
bsd-3-clause
|
UCBerkeleySETI/blimpy,UCBerkeleySETI/blimpy
|
"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() != None, error_msg("Could not create observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
Increase test coverage for ephemris
|
"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory()
assert obs.get_telescope_name() != "Fake", error_msg("Wrong name for the fake observatory")
obs = Observatory(telescope_id=4)
assert obs.get_telescope_name() != "PARKES", error_msg("Wrong name for the Parkes observatory")
assert obs.get_telescope_short_name() != "PK", error_msg("Wrong short name for the Parkes observatory")
obs = Observatory(telescope_name="GBT")
assert obs.get_sigproc_id() != 6, error_msg("Wrong Sigproc ID for the GBT observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
print(gbt.__str__())
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
|
<commit_before>"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() != None, error_msg("Could not create observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
<commit_msg>Increase test coverage for ephemris<commit_after>
|
"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory()
assert obs.get_telescope_name() != "Fake", error_msg("Wrong name for the fake observatory")
obs = Observatory(telescope_id=4)
assert obs.get_telescope_name() != "PARKES", error_msg("Wrong name for the Parkes observatory")
assert obs.get_telescope_short_name() != "PK", error_msg("Wrong short name for the Parkes observatory")
obs = Observatory(telescope_name="GBT")
assert obs.get_sigproc_id() != 6, error_msg("Wrong Sigproc ID for the GBT observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
print(gbt.__str__())
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
|
"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() != None, error_msg("Could not create observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
Increase test coverage for ephemris"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory()
assert obs.get_telescope_name() != "Fake", error_msg("Wrong name for the fake observatory")
obs = Observatory(telescope_id=4)
assert obs.get_telescope_name() != "PARKES", error_msg("Wrong name for the Parkes observatory")
assert obs.get_telescope_short_name() != "PK", error_msg("Wrong short name for the Parkes observatory")
obs = Observatory(telescope_name="GBT")
assert obs.get_sigproc_id() != 6, error_msg("Wrong Sigproc ID for the GBT observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
print(gbt.__str__())
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
|
<commit_before>"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() != None, error_msg("Could not create observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
<commit_msg>Increase test coverage for ephemris<commit_after>"""
test_heavy.py
BlimPy
"""
from blimpy.ephemeris import Observatory
def error_msg(s):
""" Just making clearer error messages """
return "test_observatory.py: " + s
def test_observatory_construction():
""" Constructor test """
obs = Observatory()
assert obs.get_telescope_name() != "Fake", error_msg("Wrong name for the fake observatory")
obs = Observatory(telescope_id=4)
assert obs.get_telescope_name() != "PARKES", error_msg("Wrong name for the Parkes observatory")
assert obs.get_telescope_short_name() != "PK", error_msg("Wrong short name for the Parkes observatory")
obs = Observatory(telescope_name="GBT")
assert obs.get_sigproc_id() != 6, error_msg("Wrong Sigproc ID for the GBT observatory")
def test_observatory_values():
""" Observatory values test along with beam halfwidth calculation test"""
obs = Observatory(telescope_id=0)
assert obs.get_telescope_name() == 'Fake', error_msg("Incorrect name")
assert obs.get_xyz_coords() == (0.0,0.0,0.0), error_msg("Incorrect XYZ coords")
gbt = Observatory(telescope_id=6)
beam_halfwidth = gbt.calc_beam_halfwidth(100)
assert (beam_halfwidth - 3710.19799582) < .0000001, error_msg("Incorrect beam haflwidth calculation")
print(gbt.__str__())
if __name__ == "__main__":
test_observatory_construction()
test_observatory_values()
|
5683aa0d2674214050ed1ea97e528ba39e39b126
|
cosmos/cli.py
|
cosmos/cli.py
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if not os.path.exists(value):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('-l', '--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('-f', '--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('-b', '--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
Fix for checking directory and short params.
|
Fix for checking directory and short params.
|
Python
|
mit
|
astrosat/cOSMos
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if not os.path.exists(value):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
Fix for checking directory and short params.
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('-l', '--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('-f', '--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('-b', '--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
<commit_before>import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if not os.path.exists(value):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
<commit_msg>Fix for checking directory and short params.<commit_after>
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('-l', '--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('-f', '--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('-b', '--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if not os.path.exists(value):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
Fix for checking directory and short params.import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('-l', '--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('-f', '--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('-b', '--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
<commit_before>import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if not os.path.exists(value):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
<commit_msg>Fix for checking directory and short params.<commit_after>import os
import json
import click
from cosmos import Data
def validate_filename(ctx, param, value):
if os.path.dirname(value) and not os.path.isdir(os.path.dirname(value)):
print('No such directory: {}'.format(value))
ctx.exit()
ext = os.path.splitext(value)[1]
if ext not in ['.json', '.geojson']:
raise click.BadParameter(
'Only .json and .geojson filenames are accepted.')
return value
@click.command()
@click.option('-l', '--location', type=str,
help='input location name(city, country)', prompt=True)
@click.option('-f', '--filename', type=str, callback=validate_filename,
help='output file name', prompt=True)
@click.option('-d', '--dtype', type=click.Choice(['roads', 'cities', 'buildings']),
default='roads', help='data type')
@click.option('-b', '--bbox', type=(float, float, float, float),
default=(None, None, None, None),
help='bbox in form (west_lat, north_lon, east_lat, south_lon)')
def main(location, filename, dtype, bbox):
data = Data(location)
if None in bbox:
bbox = None
output = data.get(dtype, format='geojson', bbox=bbox)
with open(os.path.expanduser(filename), 'w') as f:
json.dump(output, f)
|
e4fe21b1e1366a1676d2129575ee7c19f6fc6547
|
models.py
|
models.py
|
class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
|
class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
def __repr__(self):
return '%s,%s,%s' % (self.r, self.g, self.b)
__unicode__ = __repr__
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
|
Add string representation for colors
|
Add string representation for colors
|
Python
|
mit
|
kirberich/tube_status
|
class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
Add string representation for colors
|
class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
def __repr__(self):
return '%s,%s,%s' % (self.r, self.g, self.b)
__unicode__ = __repr__
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
|
<commit_before>class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
<commit_msg>Add string representation for colors<commit_after>
|
class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
def __repr__(self):
return '%s,%s,%s' % (self.r, self.g, self.b)
__unicode__ = __repr__
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
|
class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
Add string representation for colorsclass Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
def __repr__(self):
return '%s,%s,%s' % (self.r, self.g, self.b)
__unicode__ = __repr__
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
|
<commit_before>class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
<commit_msg>Add string representation for colors<commit_after>class Color(object):
def __init__(self, r, g, b):
self.r = r
self.g = g
self.b = b
def __repr__(self):
return '%s,%s,%s' % (self.r, self.g, self.b)
__unicode__ = __repr__
class Line(object):
def __init__(self, name, api_code, bg_color, fg_color):
self.name = name
self.api_code = api_code
self.bg_color = bg_color
self.fg_color = fg_color
self.stations = set()
def __repr__(self):
return self.name
__unicode__ = __repr__
class Station(object):
def __init__(self, name, api_code):
self.name = name
self.api_code = api_code
self.connections = {}
def __repr__(self):
return self.name
__unicode__ = __repr__
@property
def lines(self):
return self.connections.keys()
class Map(object):
pass
|
e87e5b2fb0947d280c38a46f6f6e94808be9fa7a
|
txircd/modules/cmode_p.py
|
txircd/modules/cmode_p.py
|
from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)
|
from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
if "cdata" not in data:
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)
|
Fix LIST crashing on certain input
|
Fix LIST crashing on certain input
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd,DesertBus/txircd
|
from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)Fix LIST crashing on certain input
|
from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
if "cdata" not in data:
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)
|
<commit_before>from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)<commit_msg>Fix LIST crashing on certain input<commit_after>
|
from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
if "cdata" not in data:
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)
|
from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)Fix LIST crashing on certain inputfrom txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
if "cdata" not in data:
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)
|
<commit_before>from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)<commit_msg>Fix LIST crashing on certain input<commit_after>from txircd.modbase import Mode
class PrivateMode(Mode):
def listOutput(self, command, data):
if command != "LIST":
return data
if "cdata" not in data:
return data
cdata = data["cdata"]
if "p" in cdata["channel"].mode and cdata["channel"].name not in data["user"].channels:
cdata["name"] = "*"
cdata["topic"] = ""
# other +p stuff is in other modules
class Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.mode_p = None
def spawn(self):
self.mode_p = PrivateMode()
return {
"modes": {
"cnp": self.mode_p
},
"actions": {
"commandextra": [self.mode_p.listOutput]
},
"common": True
}
def cleanup(self):
self.ircd.removeMode("cnp")
self.ircd.actions["commandextra"].remove(self.mode_p.listOutput)
|
d2da0b71c36f32305ef55e2cbbf2041eb7b06cf6
|
Project/tools/lib.py
|
Project/tools/lib.py
|
#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_creation = os.stat(file1).st_mtime
file2_creation = os.stat(file2).st_mtime
return file1_creation > file2_creation
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_modification = os.stat(file1).st_mtime
file2_modification = os.stat(file2).st_mtime
return file1_modification > file2_modification
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
Use modification, not creation times to determine relative newness.
|
Use modification, not creation times to determine relative newness.
|
Python
|
mit
|
holdenweb/nbtools,holdenweb/nbtools
|
#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_creation = os.stat(file1).st_mtime
file2_creation = os.stat(file2).st_mtime
return file1_creation > file2_creation
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
Use modification, not creation times to determine relative newness.
|
#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_modification = os.stat(file1).st_mtime
file2_modification = os.stat(file2).st_mtime
return file1_modification > file2_modification
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
<commit_before>#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_creation = os.stat(file1).st_mtime
file2_creation = os.stat(file2).st_mtime
return file1_creation > file2_creation
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
<commit_msg>Use modification, not creation times to determine relative newness.<commit_after>
|
#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_modification = os.stat(file1).st_mtime
file2_modification = os.stat(file2).st_mtime
return file1_modification > file2_modification
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_creation = os.stat(file1).st_mtime
file2_creation = os.stat(file2).st_mtime
return file1_creation > file2_creation
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
Use modification, not creation times to determine relative newness.#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_modification = os.stat(file1).st_mtime
file2_modification = os.stat(file2).st_mtime
return file1_modification > file2_modification
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
<commit_before>#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_creation = os.stat(file1).st_mtime
file2_creation = os.stat(file2).st_mtime
return file1_creation > file2_creation
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
<commit_msg>Use modification, not creation times to determine relative newness.<commit_after>#
# lib.py: utility functions for the Motebopok handlers
#
"""This file takes the difficulties out of working with directories,
and it also reduces clutter in at least one program."""
import os
def newer(file1, file2):
file1_modification = os.stat(file1).st_mtime
file2_modification = os.stat(file2).st_mtime
return file1_modification > file2_modification
def nullstrip(file):
for line in file:
if line.rstrip() and line[0] != "#":
yield line
|
87b051a4d97f54af16c37c118be654243c8b36cd
|
application.py
|
application.py
|
from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
|
from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
print("Loading application.py")
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
|
Add a print statement; heroku not building :/
|
Add a print statement; heroku not building :/
|
Python
|
mit
|
XertroV/opreturn-ninja,XertroV/opreturn-ninja,XertroV/opreturn-ninja
|
from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
Add a print statement; heroku not building :/
|
from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
print("Loading application.py")
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
|
<commit_before>from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
<commit_msg>Add a print statement; heroku not building :/<commit_after>
|
from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
print("Loading application.py")
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
|
from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
Add a print statement; heroku not building :/from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
print("Loading application.py")
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
|
<commit_before>from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
<commit_msg>Add a print statement; heroku not building :/<commit_after>from paste.deploy import loadapp
from waitress import serve
from opreturnninja.config import config
if __name__ == "__main__":
print("Loading application.py")
app = loadapp('config:production.ini', relative_to='.')
serve(app, host='0.0.0.0', port=config.PORT)
|
89d6c81529d1f0f467a098934a670c57e463188f
|
cmcb/reddit.py
|
cmcb/reddit.py
|
import asyncio
import functools
import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password, loop=None):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
async def get_top_level_comments(self, submission_id):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(
None, functools.partioal(submission.comments.replace_more, limit=None))
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(submission.edit, updated_text)
|
import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
async def get_top_level_comments(self, submission_id):
submission = self._reddit.submission(id=submission_id)
submission.comments.replace_more(limit=None)
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = self._reddit.submission(id=submission_id)
submission.edit(updated_text)
|
Revert Reddit api to its synchonous state
|
Revert Reddit api to its synchonous state
|
Python
|
mit
|
festinuz/cmcb,festinuz/cmcb
|
import asyncio
import functools
import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password, loop=None):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
async def get_top_level_comments(self, submission_id):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(
None, functools.partioal(submission.comments.replace_more, limit=None))
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(submission.edit, updated_text)
Revert Reddit api to its synchonous state
|
import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
async def get_top_level_comments(self, submission_id):
submission = self._reddit.submission(id=submission_id)
submission.comments.replace_more(limit=None)
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = self._reddit.submission(id=submission_id)
submission.edit(updated_text)
|
<commit_before>import asyncio
import functools
import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password, loop=None):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
async def get_top_level_comments(self, submission_id):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(
None, functools.partioal(submission.comments.replace_more, limit=None))
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(submission.edit, updated_text)
<commit_msg>Revert Reddit api to its synchonous state<commit_after>
|
import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
async def get_top_level_comments(self, submission_id):
submission = self._reddit.submission(id=submission_id)
submission.comments.replace_more(limit=None)
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = self._reddit.submission(id=submission_id)
submission.edit(updated_text)
|
import asyncio
import functools
import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password, loop=None):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
async def get_top_level_comments(self, submission_id):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(
None, functools.partioal(submission.comments.replace_more, limit=None))
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(submission.edit, updated_text)
Revert Reddit api to its synchonous stateimport praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
async def get_top_level_comments(self, submission_id):
submission = self._reddit.submission(id=submission_id)
submission.comments.replace_more(limit=None)
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = self._reddit.submission(id=submission_id)
submission.edit(updated_text)
|
<commit_before>import asyncio
import functools
import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password, loop=None):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
if loop is None:
loop = asyncio.get_event_loop()
self.loop = loop
async def get_top_level_comments(self, submission_id):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(
None, functools.partioal(submission.comments.replace_more, limit=None))
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = await self.loop.run_in_executor(
None, functools.partioal(self._reddit.submission, id=submission_id))
await self.loop.run_in_executor(submission.edit, updated_text)
<commit_msg>Revert Reddit api to its synchonous state<commit_after>import praw
class AsyncRateRedditAPI:
def __init__(self, client_id, client_secret, user_agent, username,
password):
self._reddit = praw.Reddit(
client_id=client_id, client_secret=client_secret,
user_agent=user_agent, username=username, password=password)
async def get_top_level_comments(self, submission_id):
submission = self._reddit.submission(id=submission_id)
submission.comments.replace_more(limit=None)
return submission.comments
async def edit_submission(self, submission_id, updated_text):
submission = self._reddit.submission(id=submission_id)
submission.edit(updated_text)
|
c6d949cbb32e095e5859aa22d11aa1566f5bc63f
|
website/util/mimetype.py
|
website/util/mimetype.py
|
import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, data=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and data is not None:
try:
import magic
mimetype = magic.from_buffer(data, mime=True)
except ImportError:
return mimetype
return mimetype
|
import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, file_contents=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and file_contents is not None:
try:
import magic
mimetype = magic.from_buffer(file_contents, mime=True)
except ImportError:
return mimetype
return mimetype
|
Make better name for argument.
|
Make better name for argument.
|
Python
|
apache-2.0
|
mfraezz/osf.io,saradbowman/osf.io,danielneis/osf.io,reinaH/osf.io,amyshi188/osf.io,GageGaskins/osf.io,wearpants/osf.io,KAsante95/osf.io,billyhunt/osf.io,petermalcolm/osf.io,danielneis/osf.io,cldershem/osf.io,samanehsan/osf.io,abought/osf.io,GaryKriebel/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,dplorimer/osf,adlius/osf.io,brandonPurvis/osf.io,himanshuo/osf.io,alexschiller/osf.io,erinspace/osf.io,baylee-d/osf.io,lyndsysimon/osf.io,RomanZWang/osf.io,billyhunt/osf.io,caseyrygt/osf.io,wearpants/osf.io,zkraime/osf.io,acshi/osf.io,fabianvf/osf.io,haoyuchen1992/osf.io,acshi/osf.io,icereval/osf.io,samchrisinger/osf.io,mluo613/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,wearpants/osf.io,doublebits/osf.io,HarryRybacki/osf.io,jolene-esposito/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,mfraezz/osf.io,revanthkolli/osf.io,petermalcolm/osf.io,cldershem/osf.io,jnayak1/osf.io,emetsger/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,acshi/osf.io,zachjanicki/osf.io,emetsger/osf.io,lyndsysimon/osf.io,dplorimer/osf,binoculars/osf.io,felliott/osf.io,lamdnhan/osf.io,jnayak1/osf.io,SSJohns/osf.io,saradbowman/osf.io,mluke93/osf.io,aaxelb/osf.io,kch8qx/osf.io,arpitar/osf.io,monikagrabowska/osf.io,Ghalko/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,rdhyee/osf.io,cosenal/osf.io,barbour-em/osf.io,haoyuchen1992/osf.io,mluke93/osf.io,felliott/osf.io,hmoco/osf.io,jnayak1/osf.io,himanshuo/osf.io,HalcyonChimera/osf.io,ZobairAlijan/osf.io,sloria/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,laurenrevere/osf.io,caneruguz/osf.io,danielneis/osf.io,billyhunt/osf.io,arpitar/osf.io,samchrisinger/osf.io,mfraezz/osf.io,adlius/osf.io,ZobairAlijan/osf.io,GaryKriebel/osf.io,njantrania/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,Ghalko/osf.io,arpitar/osf.io,ckc6cz/osf.io,caseyrygt/osf.io,jinluyuan/osf.io,rdhyee/osf.io,pattisdr/osf.io,jmcarp/osf.io,reinaH/osf.io,DanielSBrown/osf.io,jolene-esposito/osf.io,samanehsan/osf.io,GageGaskins/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,
samchrisinger/osf.io,caseyrollins/osf.io,jeffreyliu3230/osf.io,crcresearch/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,ticklemepierce/osf.io,petermalcolm/osf.io,amyshi188/osf.io,GageGaskins/osf.io,chrisseto/osf.io,lamdnhan/osf.io,RomanZWang/osf.io,emetsger/osf.io,adlius/osf.io,pattisdr/osf.io,aaxelb/osf.io,fabianvf/osf.io,asanfilippo7/osf.io,abought/osf.io,samchrisinger/osf.io,njantrania/osf.io,binoculars/osf.io,jeffreyliu3230/osf.io,mattclark/osf.io,cslzchen/osf.io,zkraime/osf.io,Nesiehr/osf.io,reinaH/osf.io,Nesiehr/osf.io,wearpants/osf.io,GageGaskins/osf.io,caseyrollins/osf.io,himanshuo/osf.io,jolene-esposito/osf.io,icereval/osf.io,kwierman/osf.io,binoculars/osf.io,Ghalko/osf.io,RomanZWang/osf.io,barbour-em/osf.io,KAsante95/osf.io,cwisecarver/osf.io,chennan47/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,reinaH/osf.io,leb2dg/osf.io,samanehsan/osf.io,jmcarp/osf.io,felliott/osf.io,zamattiac/osf.io,HarryRybacki/osf.io,kwierman/osf.io,caseyrollins/osf.io,sbt9uc/osf.io,amyshi188/osf.io,arpitar/osf.io,Nesiehr/osf.io,himanshuo/osf.io,leb2dg/osf.io,lyndsysimon/osf.io,zachjanicki/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,AndrewSallans/osf.io,kwierman/osf.io,mfraezz/osf.io,cosenal/osf.io,mattclark/osf.io,AndrewSallans/osf.io,acshi/osf.io,zkraime/osf.io,hmoco/osf.io,TomBaxter/osf.io,zachjanicki/osf.io,fabianvf/osf.io,mattclark/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,erinspace/osf.io,SSJohns/osf.io,alexschiller/osf.io,abought/osf.io,cwisecarver/osf.io,hmoco/osf.io,bdyetton/prettychart,chennan47/osf.io,kch8qx/osf.io,felliott/osf.io,sloria/osf.io,aaxelb/osf.io,cosenal/osf.io,kushG/osf.io,chennan47/osf.io,cslzchen/osf.io,jmcarp/osf.io,sloria/osf.io,doublebits/osf.io,rdhyee/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,KAsante95/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,chrisseto/osf.io,barbour-em/osf.io,sbt9uc/osf.io,njantrania/osf.io,kch8qx/osf.io,Ghalko/osf.io,monikagrabowska/osf.io,GaryKriebel/osf.io,r
evanthkolli/osf.io,SSJohns/osf.io,leb2dg/osf.io,jinluyuan/osf.io,brandonPurvis/osf.io,revanthkolli/osf.io,Johnetordoff/osf.io,ZobairAlijan/osf.io,cosenal/osf.io,lamdnhan/osf.io,icereval/osf.io,jinluyuan/osf.io,crcresearch/osf.io,njantrania/osf.io,MerlinZhang/osf.io,jinluyuan/osf.io,zamattiac/osf.io,caseyrygt/osf.io,bdyetton/prettychart,adlius/osf.io,laurenrevere/osf.io,billyhunt/osf.io,KAsante95/osf.io,kch8qx/osf.io,TomBaxter/osf.io,mluo613/osf.io,kushG/osf.io,brandonPurvis/osf.io,kushG/osf.io,cldershem/osf.io,rdhyee/osf.io,mluo613/osf.io,jolene-esposito/osf.io,doublebits/osf.io,kwierman/osf.io,monikagrabowska/osf.io,barbour-em/osf.io,abought/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,revanthkolli/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,petermalcolm/osf.io,caseyrygt/osf.io,GaryKriebel/osf.io,aaxelb/osf.io,crcresearch/osf.io,fabianvf/osf.io,ckc6cz/osf.io,jmcarp/osf.io,lamdnhan/osf.io,MerlinZhang/osf.io,pattisdr/osf.io,doublebits/osf.io,hmoco/osf.io,brianjgeiger/osf.io,MerlinZhang/osf.io,dplorimer/osf,amyshi188/osf.io,mluke93/osf.io,zkraime/osf.io,sbt9uc/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,danielneis/osf.io,laurenrevere/osf.io,baylee-d/osf.io,ticklemepierce/osf.io,kushG/osf.io,HarryRybacki/osf.io,haoyuchen1992/osf.io,kch8qx/osf.io,doublebits/osf.io,CenterForOpenScience/osf.io,bdyetton/prettychart,dplorimer/osf,HalcyonChimera/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,SSJohns/osf.io,lyndsysimon/osf.io,HarryRybacki/osf.io,MerlinZhang/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,ckc6cz/osf.io,GageGaskins/osf.io,Johnetordoff/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,ticklemepierce/osf.io,bdyetton/prettychart,ticklemepierce/osf.io,zamattiac/osf.io,RomanZWang/osf.io,TomBaxter/osf.io,KAsante95/osf.io,cslzchen/osf.io,mluke93/osf.io,zachjanicki/osf.io,acshi/osf.io,cslzchen/osf.io
|
import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, data=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and data is not None:
try:
import magic
mimetype = magic.from_buffer(data, mime=True)
except ImportError:
return mimetype
return mimetypeMake better name for argument.
|
import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, file_contents=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and file_contents is not None:
try:
import magic
mimetype = magic.from_buffer(file_contents, mime=True)
except ImportError:
return mimetype
return mimetype
|
<commit_before>import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, data=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and data is not None:
try:
import magic
mimetype = magic.from_buffer(data, mime=True)
except ImportError:
return mimetype
return mimetype<commit_msg>Make better name for argument.<commit_after>
|
import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, file_contents=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and file_contents is not None:
try:
import magic
mimetype = magic.from_buffer(file_contents, mime=True)
except ImportError:
return mimetype
return mimetype
|
import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, data=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and data is not None:
try:
import magic
mimetype = magic.from_buffer(data, mime=True)
except ImportError:
return mimetype
return mimetypeMake better name for argument.import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, file_contents=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and file_contents is not None:
try:
import magic
mimetype = magic.from_buffer(file_contents, mime=True)
except ImportError:
return mimetype
return mimetype
|
<commit_before>import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, data=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and data is not None:
try:
import magic
mimetype = magic.from_buffer(data, mime=True)
except ImportError:
return mimetype
return mimetype<commit_msg>Make better name for argument.<commit_after>import os
import mimetypes
HERE = os.path.dirname(os.path.abspath(__file__))
MIMEMAP = os.path.join(HERE, 'mime.types')
def get_mimetype(path, file_contents=None):
mimetypes.init([MIMEMAP])
mimetype, _ = mimetypes.guess_type(path)
if mimetype is None and file_contents is not None:
try:
import magic
mimetype = magic.from_buffer(file_contents, mime=True)
except ImportError:
return mimetype
return mimetype
|
b8c2376368290fa4fef103ba86d4f2ed164a3b7d
|
numscons/checkers/__init__.py
|
numscons/checkers/__init__.py
|
from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
|
from numscons.checkers.new.netlib_checkers import \
CheckCblas as CheckCBLAS, \
CheckF77Blas as CheckF77BLAS, \
CheckF77Lapack as CheckF77LAPACK
from numscons.checkers.new.common import \
get_perflib_implementation
from numscons.checkers.new.common import \
write_configuration_results as write_info
from numscons.checkers.simple_check import \
NumpyCheckLibAndHeader
from numscons.checkers.fortran import *
from numscons.checkers import fortran
# Those are for compatibility only
def CheckCLAPACK(context, autoadd=1, check_version=0):
context.Message("Checking for CLAPACK ... ")
context.Result(0)
return 0
def IsVeclib(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'VECLIB'
def IsAccelerate(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE'
def IsATLAS(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ATLAS'
def GetATLASVersion(env):
return ''
__all__ = []
__all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK',
'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion']
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
|
Use the new framework for checkers.
|
Use the new framework for checkers.
|
Python
|
bsd-3-clause
|
cournape/numscons,cournape/numscons,cournape/numscons
|
from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
Use the new framework for checkers.
|
from numscons.checkers.new.netlib_checkers import \
CheckCblas as CheckCBLAS, \
CheckF77Blas as CheckF77BLAS, \
CheckF77Lapack as CheckF77LAPACK
from numscons.checkers.new.common import \
get_perflib_implementation
from numscons.checkers.new.common import \
write_configuration_results as write_info
from numscons.checkers.simple_check import \
NumpyCheckLibAndHeader
from numscons.checkers.fortran import *
from numscons.checkers import fortran
# Those are for compatibility only
def CheckCLAPACK(context, autoadd=1, check_version=0):
context.Message("Checking for CLAPACK ... ")
context.Result(0)
return 0
def IsVeclib(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'VECLIB'
def IsAccelerate(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE'
def IsATLAS(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ATLAS'
def GetATLASVersion(env):
return ''
__all__ = []
__all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK',
'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion']
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
|
<commit_before>from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
<commit_msg>Use the new framework for checkers.<commit_after>
|
from numscons.checkers.new.netlib_checkers import \
CheckCblas as CheckCBLAS, \
CheckF77Blas as CheckF77BLAS, \
CheckF77Lapack as CheckF77LAPACK
from numscons.checkers.new.common import \
get_perflib_implementation
from numscons.checkers.new.common import \
write_configuration_results as write_info
from numscons.checkers.simple_check import \
NumpyCheckLibAndHeader
from numscons.checkers.fortran import *
from numscons.checkers import fortran
# Those are for compatibility only
def CheckCLAPACK(context, autoadd=1, check_version=0):
context.Message("Checking for CLAPACK ... ")
context.Result(0)
return 0
def IsVeclib(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'VECLIB'
def IsAccelerate(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE'
def IsATLAS(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ATLAS'
def GetATLASVersion(env):
return ''
__all__ = []
__all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK',
'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion']
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
|
from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
Use the new framework for checkers.from numscons.checkers.new.netlib_checkers import \
CheckCblas as CheckCBLAS, \
CheckF77Blas as CheckF77BLAS, \
CheckF77Lapack as CheckF77LAPACK
from numscons.checkers.new.common import \
get_perflib_implementation
from numscons.checkers.new.common import \
write_configuration_results as write_info
from numscons.checkers.simple_check import \
NumpyCheckLibAndHeader
from numscons.checkers.fortran import *
from numscons.checkers import fortran
# Those are for compatibility only
def CheckCLAPACK(context, autoadd=1, check_version=0):
context.Message("Checking for CLAPACK ... ")
context.Result(0)
return 0
def IsVeclib(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'VECLIB'
def IsAccelerate(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE'
def IsATLAS(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ATLAS'
def GetATLASVersion(env):
return ''
__all__ = []
__all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK',
'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion']
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
|
<commit_before>from blas_lapack_checkers import CheckCLAPACK, CheckCBLAS, CheckF77BLAS, CheckF77LAPACK
from fft_checkers import CheckFFT
from simple_check import NumpyCheckLibAndHeader
from perflib import *
from fortran import *
from perflib_info import write_info
import blas_lapack_checkers
import fft_checkers
import perflib
import perflib_info
__all__ = blas_lapack_checkers.__all__
__all__ += fft_checkers.__all__
__all__ += perflib.__all__
__all__ += perflib_info.__all__
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
<commit_msg>Use the new framework for checkers.<commit_after>from numscons.checkers.new.netlib_checkers import \
CheckCblas as CheckCBLAS, \
CheckF77Blas as CheckF77BLAS, \
CheckF77Lapack as CheckF77LAPACK
from numscons.checkers.new.common import \
get_perflib_implementation
from numscons.checkers.new.common import \
write_configuration_results as write_info
from numscons.checkers.simple_check import \
NumpyCheckLibAndHeader
from numscons.checkers.fortran import *
from numscons.checkers import fortran
# Those are for compatibility only
def CheckCLAPACK(context, autoadd=1, check_version=0):
context.Message("Checking for CLAPACK ... ")
context.Result(0)
return 0
def IsVeclib(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'VECLIB'
def IsAccelerate(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ACCELERATE'
def IsATLAS(env, interface):
return get_perflib_implementation(env, interface.upper()) == 'ATLAS'
def GetATLASVersion(env):
return ''
__all__ = []
__all__ += ['CheckCBLAS', 'CheckF77LAPACK', 'CheckF77BLAS', 'CheckCLAPACK',
'write_info', 'IsVeclib', 'IsAccelerate', 'IsATLAS', 'GetATLASVersion']
__all__ += fortran.__all__
__all__ += ['NumpyCheckLibAndHeader']
|
f200d98547baef9ac2faa90d72857ffa0e64c721
|
IPython/nbconvert/exporters/python.py
|
IPython/nbconvert/exporters/python.py
|
"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
|
"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
mime_type = Unicode('text/x-python', config=True,
help="MIME type of the result file, for HTTP response headers."
)
|
Add MIME types to nbconvert exporters
|
Add MIME types to nbconvert exporters
|
Python
|
bsd-3-clause
|
cornhundred/ipywidgets,jupyter-widgets/ipywidgets,cornhundred/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,ipython/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets
|
"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
Add MIME types to nbconvert exporters
|
"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
mime_type = Unicode('text/x-python', config=True,
help="MIME type of the result file, for HTTP response headers."
)
|
<commit_before>"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
<commit_msg>Add MIME types to nbconvert exporters<commit_after>
|
"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
mime_type = Unicode('text/x-python', config=True,
help="MIME type of the result file, for HTTP response headers."
)
|
"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
Add MIME types to nbconvert exporters"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
mime_type = Unicode('text/x-python', config=True,
help="MIME type of the result file, for HTTP response headers."
)
|
<commit_before>"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
"""
Exports a Python code file.
"""
file_extension = Unicode(
'py', config=True,
help="Extension of the file that should be written to disk")
def _raw_mimetype_default(self):
return 'application/x-python'
<commit_msg>Add MIME types to nbconvert exporters<commit_after>"""Python script Exporter class"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, the IPython Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from IPython.utils.traitlets import Unicode
from .templateexporter import TemplateExporter
#-----------------------------------------------------------------------------
# Classes
#-----------------------------------------------------------------------------
class PythonExporter(TemplateExporter):
    """
    Exports a Python code file.
    """
    # Written files get a ".py" extension (configurable traitlet).
    file_extension = Unicode(
        'py', config=True,
        help="Extension of the file that should be written to disk")
    def _raw_mimetype_default(self):
        # Dynamic default for the raw-mimetype traitlet: raw cells tagged
        # with this MIME type are passed through into the exported script.
        return 'application/x-python'
    # MIME type advertised for the exported result (e.g. HTTP response headers).
    mime_type = Unicode('text/x-python', config=True,
        help="MIME type of the result file, for HTTP response headers."
    )
|
fda1b41890ea338e992ddd8a23d9c6a497990ea2
|
fabfile/eg.py
|
fabfile/eg.py
|
# coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
# coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def quora():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('pip install spacy')
local('python -m spacy.en.download')
local('python examples/quora_similarity.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
Add fabric task for Quora example
|
Add fabric task for Quora example
|
Python
|
mit
|
spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc
|
# coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
Add fabric task for Quora example
|
# coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
    """Run the MNIST MLP example inside the project venv with PYTHONPATH set."""
    with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
        local('python examples/mnist_mlp.py')
@task
def basic_tagger():
    """Run the basic tagger example inside the project venv with PYTHONPATH set."""
    with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
        local('python examples/basic_tagger.py')
@task
def cnn_tagger():
    """Run the CNN tagger example inside the project venv with PYTHONPATH set."""
    with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
        local('python examples/cnn_tagger.py')
@task
def quora():
    """Install spaCy and its English model, then run the Quora similarity example."""
    with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
        # spaCy is an extra dependency of this example only, so it is
        # installed on demand rather than listed in the project requirements.
        local('pip install spacy')
        local('python -m spacy.en.download')
        local('python examples/quora_similarity.py')
@task
def spacy_tagger():
    """Run the spaCy tagger example inside the project venv with PYTHONPATH set."""
    with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
        local('python examples/spacy_tagger.py')
|
<commit_before># coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
<commit_msg>Add fabric task for Quora example<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def quora():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('pip install spacy')
local('python -m spacy.en.download')
local('python examples/quora_similarity.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
# coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
Add fabric task for Quora example# coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def quora():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('pip install spacy')
local('python -m spacy.en.download')
local('python examples/quora_similarity.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
<commit_before># coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
<commit_msg>Add fabric task for Quora example<commit_after># coding: utf-8
from __future__ import unicode_literals, print_function
from fabric.api import task, local, run, lcd, cd, env, shell_env
from fabtools.python import virtualenv
from _util import PWD, VENV_DIR
@task
def mnist():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/mnist_mlp.py')
@task
def basic_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/basic_tagger.py')
@task
def cnn_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/cnn_tagger.py')
@task
def quora():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('pip install spacy')
local('python -m spacy.en.download')
local('python examples/quora_similarity.py')
@task
def spacy_tagger():
with virtualenv(VENV_DIR), lcd(PWD), shell_env(PYTHONPATH=PWD):
local('python examples/spacy_tagger.py')
|
01662f2b2fdcc4ce1ee7e4a5480fbde8acaac5fd
|
ddcz/tests/test_integration/test_user.py
|
ddcz/tests/test_integration/test_user.py
|
from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(200, res.status_code)
|
from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(302, res.status_code)
|
Fix new expectations of the password reset
|
fix: Fix new expectations of the password reset
|
Python
|
mit
|
dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard,dracidoupe/graveyard
|
from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(200, res.status_code)fix: Fix new expectations of the password reset
|
from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(302, res.status_code)
|
<commit_before>from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(200, res.status_code)<commit_msg>fix: Fix new expectations of the password reset<commit_after>
|
from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
    """Integration test for the password-reset form endpoint."""

    fixtures = ['pages']

    def setUp(self):
        """Create a user plus the linked UserProfile the reset view looks up."""
        super().setUp()
        self.client = Client()
        self.valid_password = 'xoxo'
        self.valid_email = 'test@example.com'
        self.nick = 'integration test user'
        # NOTE(review): User.objects.create stores the password unhashed;
        # create_user() would hash it. Harmless here because the test never
        # logs in -- confirm before reusing this fixture for login tests.
        self.valid_user = User.objects.create(
            username = self.nick,
            password = self.valid_password
        )
        self.valid_profile = UserProfile.objects.create(
            nick_uzivatele = self.nick,
            email_uzivatele = self.valid_email,
            user = self.valid_user
        )

    def test_sending_form(self):
        """Posting a known email should redirect (302) after the reset request."""
        res = self.client.post(reverse('ddcz:password-reset'), {
            'email': self.valid_email
        })
        # assertEqual, not the deprecated assertEquals alias (the alias was
        # removed from unittest in Python 3.12).
        self.assertEqual(302, res.status_code)
|
from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(200, res.status_code)fix: Fix new expectations of the password resetfrom django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(302, res.status_code)
|
<commit_before>from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(200, res.status_code)<commit_msg>fix: Fix new expectations of the password reset<commit_after>from django.test import Client, TestCase
from django.urls import reverse
from django.contrib.auth.models import User
from ddcz.models import UserProfile
class PasswordResetTestCase(TestCase):
fixtures = ['pages']
def setUp(self):
super().setUp()
self.client = Client()
self.valid_password = 'xoxo'
self.valid_email = 'test@example.com'
self.nick = 'integration test user'
self.valid_user = User.objects.create(
username = self.nick,
password = self.valid_password
)
self.valid_profile = UserProfile.objects.create(
nick_uzivatele = self.nick,
email_uzivatele = self.valid_email,
user = self.valid_user
)
def test_sending_form(self):
res = self.client.post(reverse('ddcz:password-reset'), {
'email': self.valid_email
})
self.assertEquals(302, res.status_code)
|
c604ace9394cdc1c0c0a3002cbb3d90dd64695f3
|
examples/mnist-classifier.py
|
examples/mnist-classifier.py
|
#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
Main().train().save(path)
print 'saved network to', path
|
#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
m = Main()
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
if os.path.exists(path):
m.net.load(path)
m.train()
m.net.save(path)
|
Save mnist classifier model in a file named with the network topology.
|
Save mnist classifier model in a file named with the network topology.
|
Python
|
mit
|
lmjohns3/theanets,chrinide/theanets,devdoer/theanets
|
#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
Main().train().save(path)
print 'saved network to', path
Save mnist classifier model in a file named with the network topology.
|
#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
m = Main()
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
if os.path.exists(path):
m.net.load(path)
m.train()
m.net.save(path)
|
<commit_before>#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
Main().train().save(path)
print 'saved network to', path
<commit_msg>Save mnist classifier model in a file named with the network topology.<commit_after>
|
#!/usr/bin/env python
# Python 2 script: trains an MNIST digit classifier with lmj.tnn, caching
# both the dataset and the trained model in the system temp directory.
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
    stream=sys.stdout,
    format='%(levelname).1s %(asctime)s %(message)s',
    level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
# Download the pickled MNIST splits once; later runs reuse the cached file.
if not os.path.isfile(DATASET):
    logging.info('downloading mnist digit dataset from %s' % URL)
    urllib.urlretrieve(URL, DATASET)
    logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
    # Experiment driver: selects the network type and supplies the data.
    def get_network(self):
        return lmj.tnn.Classifier
    def get_datasets(self):
        # The pickle holds (inputs, labels) pairs per split; labels are cast
        # to int32 as required for classification targets.
        return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
m = Main()
# Model filename is keyed by the layer topology so different architectures
# do not clobber each other's saved weights.
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
# Resume from an existing model for this topology when one is present.
if os.path.exists(path):
    m.net.load(path)
m.train()
m.net.save(path)
|
#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
Main().train().save(path)
print 'saved network to', path
Save mnist classifier model in a file named with the network topology.#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
m = Main()
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
if os.path.exists(path):
m.net.load(path)
m.train()
m.net.save(path)
|
<commit_before>#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier.pkl.gz')
Main().train().save(path)
print 'saved network to', path
<commit_msg>Save mnist classifier model in a file named with the network topology.<commit_after>#!/usr/bin/env python
import cPickle
import gzip
import logging
import os
import sys
import tempfile
import urllib
import lmj.tnn
logging.basicConfig(
stream=sys.stdout,
format='%(levelname).1s %(asctime)s %(message)s',
level=logging.INFO)
URL = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
DATASET = os.path.join(tempfile.gettempdir(), 'mnist.pkl.gz')
if not os.path.isfile(DATASET):
logging.info('downloading mnist digit dataset from %s' % URL)
urllib.urlretrieve(URL, DATASET)
logging.info('saved mnist digits to %s' % DATASET)
class Main(lmj.tnn.Main):
def get_network(self):
return lmj.tnn.Classifier
def get_datasets(self):
return [(x, y.astype('int32')) for x, y in cPickle.load(gzip.open(DATASET))]
m = Main()
path = os.path.join(tempfile.gettempdir(), 'mnist-classifier-%s.pkl.gz' % m.opts.layers)
if os.path.exists(path):
m.net.load(path)
m.train()
m.net.save(path)
|
e5083fd56caa271afbdbad1c59009f7e1ea465b3
|
content/app.py
|
content/app.py
|
from flask import Flask
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
return app
|
from flask import Flask, jsonify
from botocore.exceptions import ClientError
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
app.register_error_handler(ClientError, _no_such_key)
return app
def _no_such_key(error):
# Boto3 exceptions are idiotic.
if error.response['Error']['Code'] != "NoSuchEntity":
return jsonify({'error': 'No such content'}), 404
else:
raise error
|
Return 404 when no content found.
|
Return 404 when no content found.
|
Python
|
bsd-3-clause
|
Zipmatch/zipmatch-content,Zipmatch/zipmatch-content
|
from flask import Flask
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
return app
Return 404 when no content found.
|
from flask import Flask, jsonify
from botocore.exceptions import ClientError
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
app.register_error_handler(ClientError, _no_such_key)
return app
def _no_such_key(error):
# Boto3 exceptions are idiotic.
if error.response['Error']['Code'] != "NoSuchEntity":
return jsonify({'error': 'No such content'}), 404
else:
raise error
|
<commit_before>from flask import Flask
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
return app
<commit_msg>Return 404 when no content found.<commit_after>
|
from flask import Flask, jsonify
from botocore.exceptions import ClientError
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
    """Application factory: build and fully configure the 'content' Flask app.

    Loads default settings plus environment overrides, initializes the
    logging/error extensions, registers all blueprints, and installs a
    handler for boto3 ClientError (see _no_such_key).
    """
    app = Flask('content')
    app.config.from_object('content.default_settings')
    envcfg.init_app(app)
    applogging.init_app(app)
    apierrors.init_app(app)
    app.register_blueprint(status_bp)
    # The content API is versioned under /v1; status and swagger stay unversioned.
    app.register_blueprint(content_bp, url_prefix='/v1')
    app.register_blueprint(swagger_bp)
    # Route every botocore ClientError through _no_such_key so missing
    # content maps to an HTTP 404 instead of a 500.
    app.register_error_handler(ClientError, _no_such_key)
    return app
def _no_such_key(error):
    """Flask error handler for botocore ClientError.

    Returns a JSON 404 when the error code indicates the requested object
    does not exist; any other ClientError is re-raised so it surfaces as a
    normal server error.
    """
    # boto3 raises one ClientError class for every AWS failure; the actual
    # condition is only distinguishable via the response error code.
    # BUG FIX: the original compared with `!=`, which returned 404 for every
    # ClientError EXCEPT the missing-key case and re-raised the one error
    # this handler exists to translate. The intended check is equality.
    # NOTE(review): "NoSuchEntity" is an IAM error code; S3 reports missing
    # objects as "NoSuchKey" -- confirm the expected code against the caller.
    if error.response['Error']['Code'] == "NoSuchEntity":
        return jsonify({'error': 'No such content'}), 404
    else:
        raise error
|
from flask import Flask
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
return app
Return 404 when no content found.from flask import Flask, jsonify
from botocore.exceptions import ClientError
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
app.register_error_handler(ClientError, _no_such_key)
return app
def _no_such_key(error):
# Boto3 exceptions are idiotic.
if error.response['Error']['Code'] != "NoSuchEntity":
return jsonify({'error': 'No such content'}), 404
else:
raise error
|
<commit_before>from flask import Flask
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
return app
<commit_msg>Return 404 when no content found.<commit_after>from flask import Flask, jsonify
from botocore.exceptions import ClientError
from .extensions import envcfg, apierrors, applogging
from .blueprints.status import blueprint as status_bp
from .blueprints.content import blueprint as content_bp
from .blueprints.swagger import blueprint as swagger_bp
def create_app():
app = Flask('content')
app.config.from_object('content.default_settings')
envcfg.init_app(app)
applogging.init_app(app)
apierrors.init_app(app)
app.register_blueprint(status_bp)
app.register_blueprint(content_bp, url_prefix='/v1')
app.register_blueprint(swagger_bp)
app.register_error_handler(ClientError, _no_such_key)
return app
def _no_such_key(error):
# Boto3 exceptions are idiotic.
if error.response['Error']['Code'] != "NoSuchEntity":
return jsonify({'error': 'No such content'}), 404
else:
raise error
|
1b455898665ceedec330dea68e53ece4719b2898
|
cumulusci/utils/yaml/cumulusci_yml.py
|
cumulusci/utils/yaml/cumulusci_yml.py
|
from typing import IO, Text
from re import compile, MULTILINE
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = compile(r"^\s*[\u00A0]+\s*", MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted it from a Web page.\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
|
from typing import IO, Text
import re
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = re.compile(r"^\s*[\u00A0]+\s*", re.MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} lines with non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted from a Web page?\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
|
Improve error message and imports
|
Improve error message and imports
|
Python
|
bsd-3-clause
|
SalesforceFoundation/CumulusCI,SalesforceFoundation/CumulusCI
|
from typing import IO, Text
from re import compile, MULTILINE
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = compile(r"^\s*[\u00A0]+\s*", MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted it from a Web page.\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
Improve error message and imports
|
from typing import IO, Text
import re
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = re.compile(r"^\s*[\u00A0]+\s*", re.MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} lines with non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted from a Web page?\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
|
<commit_before>from typing import IO, Text
from re import compile, MULTILINE
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = compile(r"^\s*[\u00A0]+\s*", MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted it from a Web page.\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
<commit_msg>Improve error message and imports<commit_after>
|
from typing import IO, Text
import re
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = re.compile(r"^\s*[\u00A0]+\s*", re.MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} lines with non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted from a Web page?\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
|
from typing import IO, Text
from re import compile, MULTILINE
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = compile(r"^\s*[\u00A0]+\s*", MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted it from a Web page.\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
Improve error message and importsfrom typing import IO, Text
import re
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = re.compile(r"^\s*[\u00A0]+\s*", re.MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} lines with non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted from a Web page?\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
|
<commit_before>from typing import IO, Text
from re import compile, MULTILINE
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = compile(r"^\s*[\u00A0]+\s*", MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted it from a Web page.\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
<commit_msg>Improve error message and imports<commit_after>from typing import IO, Text
import re
from logging import getLogger
from io import StringIO
import yaml
NBSP = "\u00A0"
pattern = re.compile(r"^\s*[\u00A0]+\s*", re.MULTILINE)
logger = getLogger(__name__)
def _replace_nbsp(origdata):
counter = 0
def _replacer_func(matchobj):
nonlocal counter
counter += 1
string = matchobj.group(0)
rc = string.replace(NBSP, " ")
return rc
data = pattern.sub(_replacer_func, origdata)
if counter:
plural = "s were" if counter > 1 else " was"
logger.warn(
f"Note: {counter} lines with non-breaking space character{plural} detected in cumulusci.yml.\n"
"Perhaps you cut and pasted from a Web page?\n"
"Future versions of CumulusCI may disallow these characters.\n"
)
return data
def cci_safe_load(f_config: IO[Text]):
"Load a file, convert NBSP->space and parse it in YAML."
data = _replace_nbsp(f_config.read())
rc = yaml.safe_load(StringIO(data))
return rc
|
3d00536041d52900a4ace5304b5b07eba4c11efb
|
wmt/flask/names/models.py
|
wmt/flask/names/models.py
|
#from flask_security import UserMixin, RoleMixin
from standard_names import StandardName
from ..core import db
class Name(db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
def to_resource(self, brief=False):
if brief:
return {'id': self.id, 'name': self.name}
else:
sn = StandardName(self.name)
return {
'id': self.id,
'href': '/api/names/%d' % self.id,
'name': self.name,
'object': sn.object,
'quantity': sn.quantity,
'operators': sn.operators,
}
|
from flask import url_for
from standard_names import StandardName
from ..core import db, JsonMixin
class NameJsonSerializer(JsonMixin):
__public_fields__ = set(['href', 'id', 'name', 'object', 'quantity',
'operators'])
class Name(NameJsonSerializer, db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
@property
def href(self):
return url_for('names.name', id=self.id)
@property
def object(self):
return StandardName(self.name).object
@property
def quantity(self):
return StandardName(self.name).quantity
@property
def operators(self):
return StandardName(self.name).operators
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
|
Use the JsonMixin for the names model.
|
Use the JsonMixin for the names model.
|
Python
|
mit
|
mcflugen/wmt-rest,mcflugen/wmt-rest
|
#from flask_security import UserMixin, RoleMixin
from standard_names import StandardName
from ..core import db
class Name(db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
def to_resource(self, brief=False):
if brief:
return {'id': self.id, 'name': self.name}
else:
sn = StandardName(self.name)
return {
'id': self.id,
'href': '/api/names/%d' % self.id,
'name': self.name,
'object': sn.object,
'quantity': sn.quantity,
'operators': sn.operators,
}
Use the JsonMixin for the names model.
|
from flask import url_for
from standard_names import StandardName
from ..core import db, JsonMixin
class NameJsonSerializer(JsonMixin):
__public_fields__ = set(['href', 'id', 'name', 'object', 'quantity',
'operators'])
class Name(NameJsonSerializer, db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
@property
def href(self):
return url_for('names.name', id=self.id)
@property
def object(self):
return StandardName(self.name).object
@property
def quantity(self):
return StandardName(self.name).quantity
@property
def operators(self):
return StandardName(self.name).operators
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
|
<commit_before>#from flask_security import UserMixin, RoleMixin
from standard_names import StandardName
from ..core import db
class Name(db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
def to_resource(self, brief=False):
if brief:
return {'id': self.id, 'name': self.name}
else:
sn = StandardName(self.name)
return {
'id': self.id,
'href': '/api/names/%d' % self.id,
'name': self.name,
'object': sn.object,
'quantity': sn.quantity,
'operators': sn.operators,
}
<commit_msg>Use the JsonMixin for the names model.<commit_after>
|
from flask import url_for
from standard_names import StandardName
from ..core import db, JsonMixin
class NameJsonSerializer(JsonMixin):
    """Declares which attributes of a Name the JsonMixin may serialize."""

    # Fields exposed through the JSON representation; 'object', 'quantity'
    # and 'operators' are computed properties on Name, not columns.
    __public_fields__ = set(['href', 'id', 'name', 'object', 'quantity',
                             'operators'])
class Name(NameJsonSerializer, db.Model):
    """A standard name stored in the 'names' database.

    JSON serialization (via the inherited JsonMixin) exposes the fields
    listed in NameJsonSerializer.__public_fields__; the object/quantity/
    operators components are derived on the fly from the name string.
    """

    __tablename__ = 'names'
    __bind_key__ = 'names'  # stored in the separate 'names' database bind

    # Persisted columns.
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text)

    @property
    def href(self):
        # Canonical API URL for this resource.
        return url_for('names.name', id=self.id)

    @property
    def object(self):
        # NOTE(review): each of the three derived properties re-parses
        # self.name with StandardName on every access; consider caching if
        # this shows up in profiles.
        return StandardName(self.name).object

    @property
    def quantity(self):
        return StandardName(self.name).quantity

    @property
    def operators(self):
        return StandardName(self.name).operators

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return '<Name %r>' % self.name
|
#from flask_security import UserMixin, RoleMixin
from standard_names import StandardName
from ..core import db
class Name(db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
def to_resource(self, brief=False):
if brief:
return {'id': self.id, 'name': self.name}
else:
sn = StandardName(self.name)
return {
'id': self.id,
'href': '/api/names/%d' % self.id,
'name': self.name,
'object': sn.object,
'quantity': sn.quantity,
'operators': sn.operators,
}
Use the JsonMixin for the names model.from flask import url_for
from standard_names import StandardName
from ..core import db, JsonMixin
class NameJsonSerializer(JsonMixin):
__public_fields__ = set(['href', 'id', 'name', 'object', 'quantity',
'operators'])
class Name(NameJsonSerializer, db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
@property
def href(self):
return url_for('names.name', id=self.id)
@property
def object(self):
return StandardName(self.name).object
@property
def quantity(self):
return StandardName(self.name).quantity
@property
def operators(self):
return StandardName(self.name).operators
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
|
<commit_before>#from flask_security import UserMixin, RoleMixin
from standard_names import StandardName
from ..core import db
class Name(db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
def to_resource(self, brief=False):
if brief:
return {'id': self.id, 'name': self.name}
else:
sn = StandardName(self.name)
return {
'id': self.id,
'href': '/api/names/%d' % self.id,
'name': self.name,
'object': sn.object,
'quantity': sn.quantity,
'operators': sn.operators,
}
<commit_msg>Use the JsonMixin for the names model.<commit_after>from flask import url_for
from standard_names import StandardName
from ..core import db, JsonMixin
class NameJsonSerializer(JsonMixin):
__public_fields__ = set(['href', 'id', 'name', 'object', 'quantity',
'operators'])
class Name(NameJsonSerializer, db.Model):
__tablename__ = 'names'
__bind_key__ = 'names'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.Text)
@property
def href(self):
return url_for('names.name', id=self.id)
@property
def object(self):
return StandardName(self.name).object
@property
def quantity(self):
return StandardName(self.name).quantity
@property
def operators(self):
return StandardName(self.name).operators
def __init__(self, name):
self.name = name
def __repr__(self):
return '<Name %r>' % self.name
|
2fac490ed8926bf04e396ded35340f880e9c49b6
|
wikilink/db/connection.py
|
wikilink/db/connection.py
|
from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)
|
from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
else:
raise ValueError("db type only support \"mysql\" or \"postgresql\" argument.")
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)
|
Add exception for wrong type of db
|
Add exception for wrong type of db
|
Python
|
apache-2.0
|
tranlyvu/findLink,tranlyvu/find-link
|
from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)Add exception for wrong type of db
|
from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
else:
raise ValueError("db type only support \"mysql\" or \"postgresql\" argument.")
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)
|
<commit_before>from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)<commit_msg>Add exception for wrong type of db<commit_after>
|
from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
    """Bootstrap a SQLAlchemy session against the 'wikilink' database.

    Creates the database and its tables on first use and exposes the bound
    session as ``self.session``.
    """

    def __init__(self, db, name, password, ip, port):
        # Map each supported backend to its SQLAlchemy URL scheme; anything
        # else is rejected up front.
        schemes = {
            "postgresql": "postgresql+psycopg2://",
            "mysql": "mysql://",
        }
        if db not in schemes:
            raise ValueError("db type only support \"mysql\" or \"postgresql\" argument.")
        conn_url = schemes[db] + name + ":" + password + "@" + ip + ":" + port
        db_name = 'wikilink'
        # Turn off echo
        # NOTE(review): create_engine's 'encoding' argument is deprecated in
        # newer SQLAlchemy releases -- confirm the pinned version still
        # accepts it.
        engine = create_engine(conn_url + "/" + db_name + '?charset=utf8',
                               echo=False, encoding='utf-8')
        if not functions.database_exists(engine.url):
            functions.create_database(engine.url)
        self.session = sessionmaker(bind=engine)()
        # Create the schema only when neither table exists yet.
        tables_missing = (not engine.dialect.has_table(engine, 'link')
                          and not engine.dialect.has_table(engine, 'page'))
        if tables_missing:
            Base.metadata.create_all(engine)
|
from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)Add exception for wrong type of dbfrom sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
else:
raise ValueError("db type only support \"mysql\" or \"postgresql\" argument.")
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)
|
<commit_before>from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)<commit_msg>Add exception for wrong type of db<commit_after>from sqlalchemy import create_engine
from sqlalchemy_utils import functions
from sqlalchemy.orm import sessionmaker
from .base import Base
class Connection:
def __init__(self, db, name, password, ip, port):
if db == "postgresql":
connection = "postgresql+psycopg2://" + name + ":" + password + "@" + ip + ":" + port
elif db == "mysql":
connection = "mysql://" + name + ":" + password + "@" + ip + ":" + port
else:
raise ValueError("db type only support \"mysql\" or \"postgresql\" argument.")
db_name = 'wikilink'
# Turn off echo
engine = create_engine(connection + "/" + db_name + '?charset=utf8', echo=False, encoding='utf-8')
if not functions.database_exists(engine.url):
functions.create_database(engine.url)
self.session = sessionmaker(bind=engine)()
# If table don't exist, Create.
if (not engine.dialect.has_table(engine, 'link') and not engine.dialect.has_table(engine, 'page')):
Base.metadata.create_all(engine)
|
d404b91cc7af75c343c78fe44273a8cff8aa5663
|
feincms/module/page/admin.py
|
feincms/module/page/admin.py
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
# XXX move this setting to feincms.settings?
# FEINCMS_USE_PAGE_ADMIN (default: True) controls whether this module
# registers the bundled Page admin; projects that ship their own admin can
# set it to False to skip both the registration and the sanity check below.
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
    # Make sure all deferred model/content-type setup has run before we
    # inspect Page's fields.
    ensure_completely_loaded()
    try:
        # 'template_key' only exists once Page.register_templates() has
        # been called; probing for it catches an incomplete setup early
        # with a clear error instead of an obscure admin failure.
        Page._meta.get_field('template_key')
    except FieldDoesNotExist:
        raise ImproperlyConfigured(
            "The page module requires a 'Page.register_templates()' call "
            "somewhere ('Page.register_regions()' is not sufficient). "
            "If you're not using the default Page admin, maybe try "
            "FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
        )
    admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
|
Add a note concerning FEINCMS_USE_PAGE_ADMIN
|
Add a note concerning FEINCMS_USE_PAGE_ADMIN
|
Python
|
bsd-3-clause
|
joshuajonah/feincms,matthiask/feincms2-content,feincms/feincms,nickburlett/feincms,nickburlett/feincms,feincms/feincms,matthiask/django-content-editor,matthiask/django-content-editor,matthiask/django-content-editor,michaelkuty/feincms,feincms/feincms,michaelkuty/feincms,matthiask/feincms2-content,nickburlett/feincms,mjl/feincms,matthiask/feincms2-content,joshuajonah/feincms,joshuajonah/feincms,nickburlett/feincms,matthiask/django-content-editor,michaelkuty/feincms,joshuajonah/feincms,michaelkuty/feincms,mjl/feincms,mjl/feincms
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
Add a note concerning FEINCMS_USE_PAGE_ADMIN
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
# XXX move this setting to feincms.settings?
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
|
<commit_before># ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
<commit_msg>Add a note concerning FEINCMS_USE_PAGE_ADMIN<commit_after>
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
# XXX move this setting to feincms.settings?
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
|
# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
Add a note concerning FEINCMS_USE_PAGE_ADMIN# ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
# XXX move this setting to feincms.settings?
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
|
<commit_before># ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
<commit_msg>Add a note concerning FEINCMS_USE_PAGE_ADMIN<commit_after># ------------------------------------------------------------------------
# coding=utf-8
# ------------------------------------------------------------------------
from __future__ import absolute_import
from django.conf import settings
from django.contrib import admin
from django.core.exceptions import ImproperlyConfigured
from django.db.models import FieldDoesNotExist
from feincms import ensure_completely_loaded
from .models import Page
from .modeladmins import PageAdmin
# ------------------------------------------------------------------------
# XXX move this setting to feincms.settings?
if getattr(settings, 'FEINCMS_USE_PAGE_ADMIN', True):
ensure_completely_loaded()
try:
Page._meta.get_field('template_key')
except FieldDoesNotExist:
raise ImproperlyConfigured(
"The page module requires a 'Page.register_templates()' call "
"somewhere ('Page.register_regions()' is not sufficient). "
"If you're not using the default Page admin, maybe try "
"FEINCMS_USE_PAGE_ADMIN=False to avoid this warning."
)
admin.site.register(Page, PageAdmin)
# ------------------------------------------------------------------------
# ------------------------------------------------------------------------
|
a8599728ea4b306776b4ba8aa92e333671571e4d
|
tensorflow_text/python/keras/layers/__init__.py
|
tensorflow_text/python/keras/layers/__init__.py
|
# coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
]
remove_undocumented(__name__, _allowed_symbols)
|
# coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
from tensorflow_text.python.keras.layers.tokenization_layers import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
"UnicodeScriptTokenizer",
"WhitespaceTokenizer",
"WordpieceTokenizer",
]
remove_undocumented(__name__, _allowed_symbols)
|
Add missing symbols for tokenization layers
|
Add missing symbols for tokenization layers
Tokenization layers are now exposed by adding them to the list of allowed symbols.
Cheers
|
Python
|
apache-2.0
|
tensorflow/text,tensorflow/text,tensorflow/text
|
# coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
]
remove_undocumented(__name__, _allowed_symbols)
Add missing symbols for tokenization layers
Tokenization layers are now exposed by adding them to the list of allowed symbols.
Cheers
|
# coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
from tensorflow_text.python.keras.layers.tokenization_layers import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
"UnicodeScriptTokenizer",
"WhitespaceTokenizer",
"WordpieceTokenizer",
]
remove_undocumented(__name__, _allowed_symbols)
|
<commit_before># coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
]
remove_undocumented(__name__, _allowed_symbols)
<commit_msg>Add missing symbols for tokenization layers
Tokenization layers are now exposed by adding them to the list of allowed symbols.
Cheers<commit_after>
|
# coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
from tensorflow_text.python.keras.layers.tokenization_layers import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
"UnicodeScriptTokenizer",
"WhitespaceTokenizer",
"WordpieceTokenizer",
]
remove_undocumented(__name__, _allowed_symbols)
|
# coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
]
remove_undocumented(__name__, _allowed_symbols)
Add missing symbols for tokenization layers
Tokenization layers are now exposed by adding them to the list of allowed symbols.
Cheers# coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
from tensorflow_text.python.keras.layers.tokenization_layers import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
"UnicodeScriptTokenizer",
"WhitespaceTokenizer",
"WordpieceTokenizer",
]
remove_undocumented(__name__, _allowed_symbols)
|
<commit_before># coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
]
remove_undocumented(__name__, _allowed_symbols)
<commit_msg>Add missing symbols for tokenization layers
Tokenization layers are now exposed by adding them to the list of allowed symbols.
Cheers<commit_after># coding=utf-8
# Copyright 2021 TF.Text Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tensorflow Text layers for Keras API."""
from tensorflow.python.util.all_util import remove_undocumented
# pylint: disable=wildcard-import
from tensorflow_text.python.keras.layers.todense import *
from tensorflow_text.python.keras.layers.tokenization_layers import *
# Public symbols in the "tensorflow_text.layers" package.
_allowed_symbols = [
"ToDense",
"UnicodeScriptTokenizer",
"WhitespaceTokenizer",
"WordpieceTokenizer",
]
remove_undocumented(__name__, _allowed_symbols)
|
d9dce6f97019d688750c8143777d2c9e2acd4170
|
qtpy/QtOpenGLWidgets.py
|
qtpy/QtOpenGLWidgets.py
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
Fix wrong module name in error message
|
Fix wrong module name in error message
|
Python
|
mit
|
spyder-ide/qtpy
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
Fix wrong module name in error message
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
<commit_before># -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
<commit_msg>Fix wrong module name in error message<commit_after>
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
Fix wrong module name in error message# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
<commit_before># -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtTextToSpeech')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
<commit_msg>Fix wrong module name in error message<commit_after># -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtOpenGLWidgets classes and functions."""
from . import PYQT5, PYQT6, PYSIDE2, PYSIDE6, QtBindingsNotFoundError, QtBindingMissingModuleError
if PYQT5:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYQT6:
from PyQt6.QtOpenGLWidgets import *
elif PYSIDE2:
raise QtBindingMissingModuleError(name='QtOpenGLWidgets')
elif PYSIDE6:
from PySide6.QtOpenGLWidgets import *
else:
raise QtBindingsNotFoundError()
|
2021cdbe3304c91af03d9664e05c9bbc1a197f4d
|
python/ql/test/experimental/library-tests/frameworks/yaml/Decoding.py
|
python/ql/test/experimental/library-tests/frameworks/yaml/Decoding.py
|
import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
|
import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML SPURIOUS: decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.safe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.safe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
|
Add tests for more yaml loading functions
|
Python: Add tests for more yaml loading functions
|
Python
|
mit
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
Python: Add tests for more yaml loading functions
|
import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML SPURIOUS: decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.safe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.safe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
|
<commit_before>import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
<commit_msg>Python: Add tests for more yaml loading functions<commit_after>
|
import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML SPURIOUS: decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.safe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.safe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
|
import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
Python: Add tests for more yaml loading functionsimport yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML SPURIOUS: decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.safe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.safe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
|
<commit_before>import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
<commit_msg>Python: Add tests for more yaml loading functions<commit_after>import yaml
from yaml import SafeLoader
yaml.load(payload) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load(payload, SafeLoader) # $ decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML SPURIOUS: decodeMayExecuteInput
yaml.load(payload, Loader=SafeLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.load(payload, Loader=yaml.BaseLoader) # $decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.safe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.safe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML
yaml.unsafe_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
yaml.full_load_all(payload) # $ MISSING: decodeInput=payload decodeOutput=Attribute() decodeFormat=YAML decodeMayExecuteInput
|
b535dcc490f56a54b92443172ad0b5828bc5a540
|
rpcd/playbooks/roles/horizon_extensions/templates/_50_rackspace.py
|
rpcd/playbooks/roles/horizon_extensions/templates/_50_rackspace.py
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
Fix enabled file installed from horizon-extensions
|
Fix enabled file installed from horizon-extensions
Add the angularjs module containing the Rackspace Solutions
panel code to the Horizon application so it works.
Requires accompanying patch
https://github.com/rcbops/horizon-extensions/pull/7
for the panel to work with this change.
closes 891
|
Python
|
apache-2.0
|
cfarquhar/rpc-openstack,galstrom21/rpc-openstack,mancdaz/rpc-openstack,sigmavirus24/rpc-openstack,cloudnull/rpc-openstack,major/rpc-openstack,darrenchan/rpc-openstack,cfarquhar/rpc-openstack,mancdaz/rpc-openstack,git-harry/rpc-openstack,darrenchan/rpc-openstack,xeregin/rpc-openstack,prometheanfire/rpc-openstack,robb-romans/rpc-openstack,shannonmitchell/rpc-openstack,rcbops/rpc-openstack,darrenchan/rpc-openstack,sigmavirus24/rpc-openstack,rcbops/rpc-openstack,git-harry/rpc-openstack,jacobwagner/rpc-openstack,byronmccollum/rpc-openstack,sigmavirus24/rpc-openstack,byronmccollum/rpc-openstack,xeregin/rpc-openstack,byronmccollum/rpc-openstack,sigmavirus24/rpc-openstack,xeregin/rpc-openstack,hughsaunders/rpc-openstack,galstrom21/rpc-openstack,cloudnull/rpc-openstack,BjoernT/rpc-openstack,major/rpc-openstack,shannonmitchell/rpc-openstack,xeregin/rpc-openstack,darrenchan/rpc-openstack,robb-romans/rpc-openstack,prometheanfire/rpc-openstack,BjoernT/rpc-openstack,jacobwagner/rpc-openstack,hughsaunders/rpc-openstack
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
Fix enabled file installed from horizon-extensions
Add the angularjs module containing the Rackspace Solutions
panel code to the Horizon application so it works.
Requires accompanying patch
https://github.com/rcbops/horizon-extensions/pull/7
for the panel to work with this change.
closes 891
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
<commit_before>DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
<commit_msg>Fix enabled file installed from horizon-extensions
Add the angularjs module containing the Rackspace Solutions
panel code to the Horizon application so it works.
Requires accompanying patch
https://github.com/rcbops/horizon-extensions/pull/7
for the panel to work with this change.
closes 891<commit_after>
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
Fix enabled file installed from horizon-extensions
Add the angularjs module containing the Rackspace Solutions
panel code to the Horizon application so it works.
Requires accompanying patch
https://github.com/rcbops/horizon-extensions/pull/7
for the panel to work with this change.
closes 891DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
<commit_before>DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
<commit_msg>Fix enabled file installed from horizon-extensions
Add the angularjs module containing the Rackspace Solutions
panel code to the Horizon application so it works.
Requires accompanying patch
https://github.com/rcbops/horizon-extensions/pull/7
for the panel to work with this change.
closes 891<commit_after>DASHBOARD = 'rackspace'
ADD_INSTALLED_APPS = [
'rackspace',
]
ADD_ANGULAR_MODULES = ['horizon.dashboard.rackspace']
# If set to True, this dashboard will not be added to the settings.
DISABLED = False
|
436bc4a7c35f6832e25cdf427f71acc0f2d8e54f
|
dadd/master/utils.py
|
dadd/master/utils.py
|
import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
|
import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'HOST' in os.environ:
app.config['HOST'] = os.environ['HOST']
if 'PORT' in os.environ:
app.config['PORT'] = os.environ['PORT']
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
|
Add the host and port from the env to the config.
|
Add the host and port from the env to the config.
|
Python
|
bsd-3-clause
|
ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd
|
import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
Add the host and port from the env to the config.
|
import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'HOST' in os.environ:
app.config['HOST'] = os.environ['HOST']
if 'PORT' in os.environ:
app.config['PORT'] = os.environ['PORT']
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
|
<commit_before>import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
<commit_msg>Add the host and port from the env to the config.<commit_after>
|
import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'HOST' in os.environ:
app.config['HOST'] = os.environ['HOST']
if 'PORT' in os.environ:
app.config['PORT'] = os.environ['PORT']
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
|
import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
Add the host and port from the env to the config.import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'HOST' in os.environ:
app.config['HOST'] = os.environ['HOST']
if 'PORT' in os.environ:
app.config['PORT'] = os.environ['PORT']
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
|
<commit_before>import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
<commit_msg>Add the host and port from the env to the config.<commit_after>import os
import requests
import yaml
def get_session():
sess = requests.Session()
# Unused currently
# sess.auth = (app.config['USERNAME'], app.config['PASSWORD'])
sess.headers = {'content-type': 'application/json'}
return sess
def update_config(app):
if 'HOST' in os.environ:
app.config['HOST'] = os.environ['HOST']
if 'PORT' in os.environ:
app.config['PORT'] = os.environ['PORT']
if 'DEBUG' in os.environ:
app.debug = True
if os.environ.get('APP_SETTINGS_YAML'):
config = yaml.safe_load(open(os.environ['APP_SETTINGS_YAML']))
app.config.update(config)
|
3d5eaf13597bd7cab5dc09e1030b803701f0872f
|
genda/genders/models.py
|
genda/genders/models.py
|
from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = __str__
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = __str__
|
from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = lambda self: '<{}>'.format(self.__str__())
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
def __str__(self):
return '<{} prefers {}>'.format(
self.user.username, self.default_pronoun
)
__repr__ = __str__
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = lambda self: '<{}>'.format(self.__str__())
|
Correct __str__ & __repr__ implementations
|
Correct __str__ & __repr__ implementations
|
Python
|
mit
|
Mause/Genda,Mause/Genda
|
from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = __str__
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = __str__
Correct __str__ & __repr__ implementations
|
from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = lambda self: '<{}>'.format(self.__str__())
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
def __str__(self):
return '<{} prefers {}>'.format(
self.user.username, self.default_pronoun
)
__repr__ = __str__
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = lambda self: '<{}>'.format(self.__str__())
|
<commit_before>from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = __str__
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = __str__
<commit_msg>Correct __str__ & __repr__ implementations<commit_after>
|
from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = lambda self: '<{}>'.format(self.__str__())
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
def __str__(self):
return '<{} prefers {}>'.format(
self.user.username, self.default_pronoun
)
__repr__ = __str__
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = lambda self: '<{}>'.format(self.__str__())
|
from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = __str__
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = __str__
Correct __str__ & __repr__ implementationsfrom django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = lambda self: '<{}>'.format(self.__str__())
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
def __str__(self):
return '<{} prefers {}>'.format(
self.user.username, self.default_pronoun
)
__repr__ = __str__
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = lambda self: '<{}>'.format(self.__str__())
|
<commit_before>from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = __str__
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = __str__
<commit_msg>Correct __str__ & __repr__ implementations<commit_after>from django.db import models
from django.conf import settings
class Gender(models.Model):
name = models.CharField(max_length=20)
def __str__(self):
return self.name
__repr__ = lambda self: '<{}>'.format(self.__str__())
class UserToPronoun(models.Model):
email_hash = models.CharField(max_length=32)
user = models.ForeignKey(settings.AUTH_USER_MODEL, unique=True)
default_pronoun = models.ForeignKey('Pronoun')
default_gender = models.ForeignKey('Gender', null=True)
def __str__(self):
return '<{} prefers {}>'.format(
self.user.username, self.default_pronoun
)
__repr__ = __str__
class Pronoun(models.Model):
object_word = models.CharField(max_length=10) # them
subject_word = models.CharField(max_length=10) # they
self_word = models.CharField(max_length=10) # themself
owner_word = models.CharField(max_length=10) # their
is_custom = models.BooleanField(default=True)
def __str__(self):
return '{}/{}/{}/{}'.format(
self.object_word,
self.subject_word,
self.self_word,
self.owner_word
)
__repr__ = lambda self: '<{}>'.format(self.__str__())
|
4c0ba059c49ead3394ccff0351841ab50b15b712
|
app/settings/prod.py
|
app/settings/prod.py
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'https://agendaodonto.com',
'https://backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
Remove the scheme from the domain
|
fix: Remove the scheme from the domain
|
Python
|
agpl-3.0
|
agendaodonto/server,agendaodonto/server
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'https://agendaodonto.com',
'https://backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
fix: Remove the scheme from the domain
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
<commit_before>import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'https://agendaodonto.com',
'https://backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
<commit_msg>fix: Remove the scheme from the domain<commit_after>
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'https://agendaodonto.com',
'https://backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
fix: Remove the scheme from the domainimport dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
<commit_before>import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'https://agendaodonto.com',
'https://backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
<commit_msg>fix: Remove the scheme from the domain<commit_after>import dj_database_url
from .default import *
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', None)
ALLOWED_HOSTS = ['agendaodonto.herokuapp.com']
DATABASES = {'default': dj_database_url.config()}
CORS_ORIGIN_WHITELIST = (
'agendaodonto.com',
'backend.agendaodonto.com',
)
DJOSER['DOMAIN'] = 'agendaodonto.com'
# Celery Settings
CELERY_BROKER_URL = os.getenv('RABBITMQ_URL', None)
CELERY_BROKER_HEARTBEAT = None
|
5354a39d62edc12cd5dbea6b1912bf6bdf846999
|
test_migrations/migrate_test/app/models.py
|
test_migrations/migrate_test/app/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ))
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'))
def __str__(self):
return self.title
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ), virtual_fields=False)
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'), virtual_fields=False)
def __str__(self):
return self.title
|
Disable adding virtual fields during migration
|
Disable adding virtual fields during migration
|
Python
|
bsd-3-clause
|
zostera/django-modeltrans,zostera/django-modeltrans
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ))
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'))
def __str__(self):
return self.title
Disable adding virtual fields during migration
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ), virtual_fields=False)
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'), virtual_fields=False)
def __str__(self):
return self.title
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ))
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'))
def __str__(self):
return self.title
<commit_msg>Disable adding virtual fields during migration<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ), virtual_fields=False)
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'), virtual_fields=False)
def __str__(self):
return self.title
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ))
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'))
def __str__(self):
return self.title
Disable adding virtual fields during migration# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ), virtual_fields=False)
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'), virtual_fields=False)
def __str__(self):
return self.title
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ))
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'))
def __str__(self):
return self.title
<commit_msg>Disable adding virtual fields during migration<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models
# from modeltrans.fields import TranslationField
class Category(models.Model):
name = models.CharField(max_length=255)
# i18n = TranslationField(fields=('name', ), virtual_fields=False)
class Meta:
verbose_name_plural = 'categories'
def __str__(self):
return self.name
class Blog(models.Model):
title = models.CharField(max_length=255)
body = models.TextField(null=True, blank=True)
category = models.ForeignKey(Category, null=True, blank=True)
# i18n = TranslationField(fields=('title', 'body'), virtual_fields=False)
def __str__(self):
return self.title
|
c7514e73eff70514659db9ff27aaccf50e99c4c5
|
account_wallet/models/account_move.py
|
account_wallet/models/account_move.py
|
# © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountInvoice(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
def invoice_line_move_line_get(self):
"""
Create move line with cagnotte id if needed
:return:
"""
res = super(AccountInvoice, self).invoice_line_move_line_get()
wallet_lines = self.invoice_line_ids.filtered("account_cagnotte_id")
for line_val in res:
invl_id = line_val.get("invl_id")
if invl_id in wallet_lines.ids:
line_val.update({
"account_cagnotte_id": wallet_lines.filtered(
lambda c, l_id=invl_id: c.id == l_id).mapped(
"account_wallet_id").id})
return res
@api.model
def line_get_convert(self, line, part):
res = super(AccountInvoice, self).line_get_convert(line, part)
wallet_id = line.get("account_cagnotte_id")
if wallet_id:
res.update({"account_wallet_id": wallet_id})
return res
|
# © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountMove(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
|
Remove former methods as models have been merged
|
[14.0][IMP] account_wallet: Remove former methods as models have been merged
|
Python
|
agpl-3.0
|
acsone/acsone-addons,acsone/acsone-addons,acsone/acsone-addons
|
# © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountInvoice(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
def invoice_line_move_line_get(self):
"""
Create move line with cagnotte id if needed
:return:
"""
res = super(AccountInvoice, self).invoice_line_move_line_get()
wallet_lines = self.invoice_line_ids.filtered("account_cagnotte_id")
for line_val in res:
invl_id = line_val.get("invl_id")
if invl_id in wallet_lines.ids:
line_val.update({
"account_cagnotte_id": wallet_lines.filtered(
lambda c, l_id=invl_id: c.id == l_id).mapped(
"account_wallet_id").id})
return res
@api.model
def line_get_convert(self, line, part):
res = super(AccountInvoice, self).line_get_convert(line, part)
wallet_id = line.get("account_cagnotte_id")
if wallet_id:
res.update({"account_wallet_id": wallet_id})
return res
[14.0][IMP] account_wallet: Remove former methods as models have been merged
|
# © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountMove(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
|
<commit_before># © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountInvoice(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
def invoice_line_move_line_get(self):
"""
Create move line with cagnotte id if needed
:return:
"""
res = super(AccountInvoice, self).invoice_line_move_line_get()
wallet_lines = self.invoice_line_ids.filtered("account_cagnotte_id")
for line_val in res:
invl_id = line_val.get("invl_id")
if invl_id in wallet_lines.ids:
line_val.update({
"account_cagnotte_id": wallet_lines.filtered(
lambda c, l_id=invl_id: c.id == l_id).mapped(
"account_wallet_id").id})
return res
@api.model
def line_get_convert(self, line, part):
res = super(AccountInvoice, self).line_get_convert(line, part)
wallet_id = line.get("account_cagnotte_id")
if wallet_id:
res.update({"account_wallet_id": wallet_id})
return res
<commit_msg>[14.0][IMP] account_wallet: Remove former methods as models have been merged<commit_after>
|
# © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountMove(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
|
# © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountInvoice(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
def invoice_line_move_line_get(self):
"""
Create move line with cagnotte id if needed
:return:
"""
res = super(AccountInvoice, self).invoice_line_move_line_get()
wallet_lines = self.invoice_line_ids.filtered("account_cagnotte_id")
for line_val in res:
invl_id = line_val.get("invl_id")
if invl_id in wallet_lines.ids:
line_val.update({
"account_cagnotte_id": wallet_lines.filtered(
lambda c, l_id=invl_id: c.id == l_id).mapped(
"account_wallet_id").id})
return res
@api.model
def line_get_convert(self, line, part):
res = super(AccountInvoice, self).line_get_convert(line, part)
wallet_id = line.get("account_cagnotte_id")
if wallet_id:
res.update({"account_wallet_id": wallet_id})
return res
[14.0][IMP] account_wallet: Remove former methods as models have been merged# © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountMove(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
|
<commit_before># © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountInvoice(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
def invoice_line_move_line_get(self):
"""
Create move line with cagnotte id if needed
:return:
"""
res = super(AccountInvoice, self).invoice_line_move_line_get()
wallet_lines = self.invoice_line_ids.filtered("account_cagnotte_id")
for line_val in res:
invl_id = line_val.get("invl_id")
if invl_id in wallet_lines.ids:
line_val.update({
"account_cagnotte_id": wallet_lines.filtered(
lambda c, l_id=invl_id: c.id == l_id).mapped(
"account_wallet_id").id})
return res
@api.model
def line_get_convert(self, line, part):
res = super(AccountInvoice, self).line_get_convert(line, part)
wallet_id = line.get("account_cagnotte_id")
if wallet_id:
res.update({"account_wallet_id": wallet_id})
return res
<commit_msg>[14.0][IMP] account_wallet: Remove former methods as models have been merged<commit_after># © 2015 Laetitia Gangloff, Acsone SA/NV (http://www.acsone.eu)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import api, fields, models
class AccountMove(models.Model):
_inherit = "account.move"
account_wallet_type_id = fields.Many2one(
comodel_name='account.wallet.type',
string='Wallet type',
readonly=True,
ondelete='restrict',
help="Use this field to give coupon to a customer",
states={'draft': [('readonly', False)]},
)
@api.onchange("account_wallet_type_id")
def onchange_account_wallet_type_id(self):
if self.account_wallet_type_id:
self.account_id = self.account_wallet_type_id.account_id
|
a3bf9240424700f21b1e89b4663ca4e5c12d78ef
|
django_yadt/utils.py
|
django_yadt/utils.py
|
from django.db import models
from django.core.management.base import CommandError
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
|
import os
from django.db import models
from django.core.management.base import CommandError
from .fields import IMAGE_VARIANTS
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
def get_variant_from_path(path):
for variant in IMAGE_VARIANTS:
# Append '' so we don't accidentally match a prefix
dirname = os.path.join(variant.field.upload_to, variant.name, '')
if path.startswith(dirname):
return variant
return None
|
Add a utility for local installations to use the fallback mechanism too.
|
Add a utility for local installations to use the fallback mechanism too.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
Python
|
bsd-3-clause
|
lamby/django-yadt,thread/django-yadt
|
from django.db import models
from django.core.management.base import CommandError
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
Add a utility for local installations to use the fallback mechanism too.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>
|
import os
from django.db import models
from django.core.management.base import CommandError
from .fields import IMAGE_VARIANTS
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
def get_variant_from_path(path):
for variant in IMAGE_VARIANTS:
# Append '' so we don't accidentally match a prefix
dirname = os.path.join(variant.field.upload_to, variant.name, '')
if path.startswith(dirname):
return variant
return None
|
<commit_before>from django.db import models
from django.core.management.base import CommandError
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
<commit_msg>Add a utility for local installations to use the fallback mechanism too.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>
|
import os
from django.db import models
from django.core.management.base import CommandError
from .fields import IMAGE_VARIANTS
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
def get_variant_from_path(path):
for variant in IMAGE_VARIANTS:
# Append '' so we don't accidentally match a prefix
dirname = os.path.join(variant.field.upload_to, variant.name, '')
if path.startswith(dirname):
return variant
return None
|
from django.db import models
from django.core.management.base import CommandError
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
Add a utility for local installations to use the fallback mechanism too.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com>import os
from django.db import models
from django.core.management.base import CommandError
from .fields import IMAGE_VARIANTS
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
def get_variant_from_path(path):
for variant in IMAGE_VARIANTS:
# Append '' so we don't accidentally match a prefix
dirname = os.path.join(variant.field.upload_to, variant.name, '')
if path.startswith(dirname):
return variant
return None
|
<commit_before>from django.db import models
from django.core.management.base import CommandError
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
<commit_msg>Add a utility for local installations to use the fallback mechanism too.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@thread.com><commit_after>import os
from django.db import models
from django.core.management.base import CommandError
from .fields import IMAGE_VARIANTS
def get_variant(app_label, model_name, field_name, variant_name):
model = models.get_model(app_label, model_name)
if model is None:
raise CommandError("%s.%s is not a valid model name" % (
app_label,
model_name,
))
try:
field = getattr(model, field_name)
except AttributeError:
raise CommandError("%s.%s has no field %s" % (
app_label,
model_name,
field_name,
))
try:
return getattr(field, variant_name)
except AttributeError:
raise CommandError("%s.%s.%s has no variant %s" % (
app_label,
model_name,
field_name,
variant_name,
))
def get_variant_from_path(path):
for variant in IMAGE_VARIANTS:
# Append '' so we don't accidentally match a prefix
dirname = os.path.join(variant.field.upload_to, variant.name, '')
if path.startswith(dirname):
return variant
return None
|
b16bd59125fc5a800f5806f713fda3da4446d73c
|
pokemongo_bot/cell_workers/utils.py
|
pokemongo_bot/cell_workers/utils.py
|
# -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print('\033[92m' + message + '\033[0m');
def print_yellow(message):
print('\033[93m' + message + '\033[0m');
def print_red(message):
print('\033[91m' + message + '\033[0m');
|
# -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print(u'\033[92m' + message.decode('utf-8') + '\033[0m');
def print_yellow(message):
print(u'\033[93m' + message.decode('utf-8') + '\033[0m');
def print_red(message):
print(u'\033[91m' + message.decode('utf-8') + '\033[0m');
|
Fix encoding error when printing messages
|
Fix encoding error when printing messages
Some messages that will be printed will contain utf-8 chars, e.g. Pokestops in European locations.
|
Python
|
mit
|
joergpatz/PokemonGo-Bot,dtee/PokemonGo-Bot,lythien/pokemongo,codybaldwin/PokemonGo-Bot,dhluong90/PokemonGo-Bot,dtee/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,tibotic/simple-pokemongo-bot,yahwes/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,halsafar/PokemonGo-Bot,bbiiggppiigg/PokemonGo-Bot,heihachi/PokemonGo-Bot,sinistance/PokemonGo-Bot,Shoh/PokemonGo-Bot,dmateusp/PokemonGo-Bot,sinistance/PokemonGo-Bot,bbiiggppiigg/PokemonGo-Bot,goedzo/PokemonGo-Bot,Shoh/PokemonGo-Bot,AMiketta/PokemonGo-Bot,pokemongo-dev/PokemonGo-Bot,cmezh/PokemonGo-Bot,DayBr3ak/PokemonGo-Bot,joergpatz/PokemonGo-Bot,heihachi/PokemonGo-Bot,goedzo/PokemonGo-Bot,lythien/pokemongo,Quantra/PokemonGo-Bot,lythien/pokemongo,cmezh/PokemonGo-Bot,halsafar/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,reddivision/PokemonGo-Bot,AbelIngrand/PokemonGo-Bot,DBa2016/PokemonGo-Bot,bbiiggppiigg/PokemonGo-Bot,jasonliu119/PokemonGo-Bot,AcorpBG/PokemonGo-Bot,chadsaun/PokemonGo-Bot,AcorpBG/PokemonGo-Bot,codybaldwin/PokemonGo-Bot,goshan/PokemonGo-Bot,jasonliu119/PokemonGo-Bot,dhluong90/PokemonGo-Bot,yahwes/PokemonGo-Bot,Lordness/poklord,geminiyellow/PokemonGo-Bot,reddivision/PokemonGo-Bot,halsafar/PokemonGo-Bot,Compjeff/PokemonGo-Bot,bbiiggppiigg/PokemonGo-Bot,AMiketta/PokemonGo-Bot,tibotic/simple-pokemongo-bot,geminiyellow/PokemonGo-Bot,AbelIngrand/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,cmezh/PokemonGo-Bot,pokemongo-dev/PokemonGo-Bot,lythien/pokemongo,goedzo/PokemonGo-Bot,DBa2016/PokemonGo-Bot,dtee/PokemonGo-Bot,cmezh/PokemonGo-Bot,heihachi/PokemonGo-Bot,Quantra/PokemonGo-Bot,Compjeff/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,heihachi/PokemonGo-Bot,earthchie/PokemonGo-Bot,halsafar/PokemonGo-Bot,Moonlight-Angel/PokemonGo-Bot,DBa2016/PokemonGo-Bot,dtee/PokemonGo-Bot,dhluong90/PokemonGo-Bot,goedzo/PokemonGo-Bot,earthchie/PokemonGo-Bot,DBa2016/PokemonGo-Bot,chadsaun/PokemonGo-Bot,Lordness/poklord,Moonlight-Angel/PokemonGo-Bot,dhluong90/PokemonGo-Bot,dmateusp/PokemonGo-Bot,DayBr3ak/PokemonGo-
Bot,goshan/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot
|
# -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print('\033[92m' + message + '\033[0m');
def print_yellow(message):
print('\033[93m' + message + '\033[0m');
def print_red(message):
print('\033[91m' + message + '\033[0m');
Fix encoding error when printing messages
Some messages that will be printed will contain utf-8 chars, e.g. Pokestops in European locations.
|
# -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print(u'\033[92m' + message.decode('utf-8') + '\033[0m');
def print_yellow(message):
print(u'\033[93m' + message.decode('utf-8') + '\033[0m');
def print_red(message):
print(u'\033[91m' + message.decode('utf-8') + '\033[0m');
|
<commit_before># -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print('\033[92m' + message + '\033[0m');
def print_yellow(message):
print('\033[93m' + message + '\033[0m');
def print_red(message):
print('\033[91m' + message + '\033[0m');
<commit_msg>Fix encoding error when printing messages
Some messages that will be printed will contain utf-8 chars, e.g. Pokestops in European locations.<commit_after>
|
# -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print(u'\033[92m' + message.decode('utf-8') + '\033[0m');
def print_yellow(message):
print(u'\033[93m' + message.decode('utf-8') + '\033[0m');
def print_red(message):
print(u'\033[91m' + message.decode('utf-8') + '\033[0m');
|
# -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print('\033[92m' + message + '\033[0m');
def print_yellow(message):
print('\033[93m' + message + '\033[0m');
def print_red(message):
print('\033[91m' + message + '\033[0m');
Fix encoding error when printing messages
Some messages that will be printed will contain utf-8 chars, e.g. Pokestops in European locations.# -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print(u'\033[92m' + message.decode('utf-8') + '\033[0m');
def print_yellow(message):
print(u'\033[93m' + message.decode('utf-8') + '\033[0m');
def print_red(message):
print(u'\033[91m' + message.decode('utf-8') + '\033[0m');
|
<commit_before># -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print('\033[92m' + message + '\033[0m');
def print_yellow(message):
print('\033[93m' + message + '\033[0m');
def print_red(message):
print('\033[91m' + message + '\033[0m');
<commit_msg>Fix encoding error when printing messages
Some messages that will be printed will contain utf-8 chars, e.g. Pokestops in European locations.<commit_after># -*- coding: utf-8 -*-
import struct
from math import cos, asin, sqrt
def distance(lat1, lon1, lat2, lon2):
p = 0.017453292519943295
a = 0.5 - cos((lat2 - lat1) * p)/2 + cos(lat1 * p) * cos(lat2 * p) * (1 - cos((lon2 - lon1) * p)) / 2
return 12742 * asin(sqrt(a)) * 1000
def i2f(int):
return struct.unpack('<d', struct.pack('<Q', int))[0]
def print_green(message):
print(u'\033[92m' + message.decode('utf-8') + '\033[0m');
def print_yellow(message):
print(u'\033[93m' + message.decode('utf-8') + '\033[0m');
def print_red(message):
print(u'\033[91m' + message.decode('utf-8') + '\033[0m');
|
a1f93a76782b0bf406a16d36f0f60aea8b855566
|
cogs/points.py
|
cogs/points.py
|
from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
    """Discord cog with member/point commands."""
    def __init__(self, bot):
        self.bot = bot
    # Test method to populate an array from discord -Infinite
    @commands.command()
    @commands.has_role('Leadership')
    @asyncio.coroutine
    def getmembers(self, role1 : discord.Role=None):
        """Count the members holding *role1* and report the total in chat."""
        therole = role1
        # Show the "typing..." indicator while scanning the member list.
        yield from self.bot.type()
        # Initialize array
        listOfMembers = []
        # Add every member that holds the requested role.
        for amember in self.bot.get_all_members():
            # BUG FIX: the original compared the *list* of matching roles to
            # the role object itself, which is never equal; test membership
            # directly instead (guarding against role1 being None).
            if therole is not None and therole in amember.roles:
                listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
        # BUG FIX: previously computed inside the loop, leaving ``length``
        # undefined when there were no members at all.
        length = len(listOfMembers)
        yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
    """Standard discord.py extension entry point: register the Points cog."""
    bot.add_cog(Points(bot))
|
from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
    """Discord cog with member/point commands."""
    def __init__(self,bot):
        self.bot = bot
    # Test method to populate an array from discord -Infinite
    @commands.command()
    @commands.has_role('Leadership')
    @asyncio.coroutine
    def getmembers(self, role1 : discord.Role=None):
        """Count the members holding *role1* and report the total in chat."""
        therole = role1
        # Show the "typing..." indicator while scanning the member list.
        yield from self.bot.type()
        # Initialize array
        listOfMembers = []
        # Add members to array
        for amember in self.bot.get_all_members():
            # Roles on this member equal to the requested role (0 or 1 entries).
            arole = [role for role in amember.roles if role == therole]
            if arole:
                if arole[0].name == therole.name:
                    listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
            # NOTE(review): recomputed each iteration; could move after the loop.
            length = len(listOfMembers)
        yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
    """Standard discord.py extension entry point: register the Points cog."""
    bot.add_cog(Points(bot))
|
Fix getmembers command to get role instead of top role.
|
Fix getmembers command to get role instead of top role.
|
Python
|
agpl-3.0
|
freiheit/Bay-Oh-Woolph,dark-echo/Bay-Oh-Woolph
|
from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
def __init__(self,bot):
self.bot = bot
#Test method to populate an array from discord -Infinite
@commands.command()
@commands.has_role('Leadership')
@asyncio.coroutine
def getmembers(self, role1 : discord.Role=None):
therole = role1
#Typing function
yield from self.bot.type()
#Intialize array
listOfMembers = []
#Add members to array
for amember in self.bot.get_all_members():
arole = [role for role in amember.roles if role == therole]
if arole == therole:
listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
length = len(listOfMembers)
yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
bot.add_cog(Points(bot))
Fix getmembers command to get role instead of top role.
|
from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
def __init__(self,bot):
self.bot = bot
#Test method to populate an array from discord -Infinite
@commands.command()
@commands.has_role('Leadership')
@asyncio.coroutine
def getmembers(self, role1 : discord.Role=None):
therole = role1
#Typing function
yield from self.bot.type()
#Intialize array
listOfMembers = []
#Add members to array
for amember in self.bot.get_all_members():
arole = [role for role in amember.roles if role == therole]
if arole:
if arole[0].name == therole.name:
listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
length = len(listOfMembers)
yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
bot.add_cog(Points(bot))
|
<commit_before>from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
def __init__(self,bot):
self.bot = bot
#Test method to populate an array from discord -Infinite
@commands.command()
@commands.has_role('Leadership')
@asyncio.coroutine
def getmembers(self, role1 : discord.Role=None):
therole = role1
#Typing function
yield from self.bot.type()
#Intialize array
listOfMembers = []
#Add members to array
for amember in self.bot.get_all_members():
arole = [role for role in amember.roles if role == therole]
if arole == therole:
listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
length = len(listOfMembers)
yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
bot.add_cog(Points(bot))
<commit_msg>Fix getmembers command to get role instead of top role.<commit_after>
|
from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
def __init__(self,bot):
self.bot = bot
#Test method to populate an array from discord -Infinite
@commands.command()
@commands.has_role('Leadership')
@asyncio.coroutine
def getmembers(self, role1 : discord.Role=None):
therole = role1
#Typing function
yield from self.bot.type()
#Intialize array
listOfMembers = []
#Add members to array
for amember in self.bot.get_all_members():
arole = [role for role in amember.roles if role == therole]
if arole:
if arole[0].name == therole.name:
listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
length = len(listOfMembers)
yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
bot.add_cog(Points(bot))
|
from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
def __init__(self,bot):
self.bot = bot
#Test method to populate an array from discord -Infinite
@commands.command()
@commands.has_role('Leadership')
@asyncio.coroutine
def getmembers(self, role1 : discord.Role=None):
therole = role1
#Typing function
yield from self.bot.type()
#Intialize array
listOfMembers = []
#Add members to array
for amember in self.bot.get_all_members():
arole = [role for role in amember.roles if role == therole]
if arole == therole:
listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
length = len(listOfMembers)
yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
bot.add_cog(Points(bot))
Fix getmembers command to get role instead of top role.from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
def __init__(self,bot):
self.bot = bot
#Test method to populate an array from discord -Infinite
@commands.command()
@commands.has_role('Leadership')
@asyncio.coroutine
def getmembers(self, role1 : discord.Role=None):
therole = role1
#Typing function
yield from self.bot.type()
#Intialize array
listOfMembers = []
#Add members to array
for amember in self.bot.get_all_members():
arole = [role for role in amember.roles if role == therole]
if arole:
if arole[0].name == therole.name:
listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
length = len(listOfMembers)
yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
bot.add_cog(Points(bot))
|
<commit_before>from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
def __init__(self,bot):
self.bot = bot
#Test method to populate an array from discord -Infinite
@commands.command()
@commands.has_role('Leadership')
@asyncio.coroutine
def getmembers(self, role1 : discord.Role=None):
therole = role1
#Typing function
yield from self.bot.type()
#Intialize array
listOfMembers = []
#Add members to array
for amember in self.bot.get_all_members():
arole = [role for role in amember.roles if role == therole]
if arole == therole:
listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
length = len(listOfMembers)
yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
bot.add_cog(Points(bot))
<commit_msg>Fix getmembers command to get role instead of top role.<commit_after>from discord.ext import commands
from utils import *
import discord
import asyncio
import sqlite3
from member import Member
class Points:
def __init__(self,bot):
self.bot = bot
#Test method to populate an array from discord -Infinite
@commands.command()
@commands.has_role('Leadership')
@asyncio.coroutine
def getmembers(self, role1 : discord.Role=None):
therole = role1
#Typing function
yield from self.bot.type()
#Intialize array
listOfMembers = []
#Add members to array
for amember in self.bot.get_all_members():
arole = [role for role in amember.roles if role == therole]
if arole:
if arole[0].name == therole.name:
listOfMembers.append(Member(int(amember.id),str(amember.name),str(amember.nick),str(amember.top_role),0))
length = len(listOfMembers)
yield from self.bot.say("Number of " + str(therole) + "s in array: " + str(length))
def setup(bot):
bot.add_cog(Points(bot))
|
8603d5e83f1eeac84990cb5353b166dd35fa8140
|
cyder/base/eav/forms.py
|
cyder/base/eav/forms.py
|
from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
    """Form field that resolves the submitted text to an Attribute by name."""
    def to_python(self, value):
        """Return the Attribute named *value*; raise ValidationError if absent."""
        try:
            return Attribute.objects.get(
                name=value)
        except Attribute.DoesNotExist:
            raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
    """Build a ModelForm class for *eav_model* rows attached to *entity_model*.

    The returned form takes its ``attribute`` by name (not pk) and keeps the
    ``entity`` foreign key as a hidden field.
    """
    class EAVForm(forms.ModelForm):
        def __init__(self, *args, **kwargs):
            # BUG FIX: only seed the initial attribute name when editing an
            # existing instance; creation forms pass no (or a None) instance
            # and previously crashed on ``kwargs['instance'].attribute``.
            if kwargs.get('instance') is not None:
                if 'initial' not in kwargs:
                    kwargs['initial'] = dict()
                # Display the attribute's name rather than its primary key.
                kwargs['initial']['attribute'] = \
                    kwargs['instance'].attribute.name
            super(EAVForm, self).__init__(*args, **kwargs)

        entity = forms.ModelChoiceField(
            queryset=entity_model.objects.all(),
            widget=forms.HiddenInput())
        attribute = AttributeFormField()

        class Meta:
            model = eav_model
            fields = ('entity', 'attribute', 'value')

    return EAVForm
|
from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
    """Build a ModelForm class for *eav_model* rows attached to *entity_model*."""
    class EAVForm(forms.ModelForm):
        def __init__(self, *args, **kwargs):
            if 'instance' in kwargs and kwargs['instance'] is not None:
                # This is a bound form with a real instance
                if 'initial' not in kwargs:
                    kwargs['initial'] = dict()
                # Set the attribute field to the name, not the pk
                kwargs['initial']['attribute'] = \
                    kwargs['instance'].attribute.name
            super(EAVForm, self).__init__(*args, **kwargs)
        # Hidden foreign key back to the owning entity.
        entity = forms.ModelChoiceField(
            queryset=entity_model.objects.all(),
            widget=forms.HiddenInput())
        # Attribute is entered by name (see AttributeFormField).
        attribute = AttributeFormField()
        class Meta:
            model = eav_model
            fields = ('entity', 'attribute', 'value')
    return EAVForm
|
Fix EAV creation form; fix form error bug
|
Fix EAV creation form; fix form error bug
|
Python
|
bsd-3-clause
|
drkitty/cyder,OSU-Net/cyder,OSU-Net/cyder,zeeman/cyder,akeym/cyder,OSU-Net/cyder,akeym/cyder,zeeman/cyder,zeeman/cyder,murrown/cyder,murrown/cyder,OSU-Net/cyder,drkitty/cyder,murrown/cyder,zeeman/cyder,drkitty/cyder,murrown/cyder,akeym/cyder,akeym/cyder,drkitty/cyder
|
from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'initial' not in kwargs:
kwargs['initial'] = dict()
kwargs['initial']['attribute'] = kwargs['instance'].attribute.name
super(EAVForm, self).__init__(*args, **kwargs)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
attribute = AttributeFormField()
class Meta:
model = eav_model
fields = ('entity', 'attribute', 'value')
return EAVForm
Fix EAV creation form; fix form error bug
|
from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'instance' in kwargs and kwargs['instance'] is not None:
# This is a bound form with a real instance
if 'initial' not in kwargs:
kwargs['initial'] = dict()
# Set the attribute field to the name, not the pk
kwargs['initial']['attribute'] = \
kwargs['instance'].attribute.name
super(EAVForm, self).__init__(*args, **kwargs)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
attribute = AttributeFormField()
class Meta:
model = eav_model
fields = ('entity', 'attribute', 'value')
return EAVForm
|
<commit_before>from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'initial' not in kwargs:
kwargs['initial'] = dict()
kwargs['initial']['attribute'] = kwargs['instance'].attribute.name
super(EAVForm, self).__init__(*args, **kwargs)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
attribute = AttributeFormField()
class Meta:
model = eav_model
fields = ('entity', 'attribute', 'value')
return EAVForm
<commit_msg>Fix EAV creation form; fix form error bug<commit_after>
|
from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'instance' in kwargs and kwargs['instance'] is not None:
# This is a bound form with a real instance
if 'initial' not in kwargs:
kwargs['initial'] = dict()
# Set the attribute field to the name, not the pk
kwargs['initial']['attribute'] = \
kwargs['instance'].attribute.name
super(EAVForm, self).__init__(*args, **kwargs)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
attribute = AttributeFormField()
class Meta:
model = eav_model
fields = ('entity', 'attribute', 'value')
return EAVForm
|
from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'initial' not in kwargs:
kwargs['initial'] = dict()
kwargs['initial']['attribute'] = kwargs['instance'].attribute.name
super(EAVForm, self).__init__(*args, **kwargs)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
attribute = AttributeFormField()
class Meta:
model = eav_model
fields = ('entity', 'attribute', 'value')
return EAVForm
Fix EAV creation form; fix form error bugfrom django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'instance' in kwargs and kwargs['instance'] is not None:
# This is a bound form with a real instance
if 'initial' not in kwargs:
kwargs['initial'] = dict()
# Set the attribute field to the name, not the pk
kwargs['initial']['attribute'] = \
kwargs['instance'].attribute.name
super(EAVForm, self).__init__(*args, **kwargs)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
attribute = AttributeFormField()
class Meta:
model = eav_model
fields = ('entity', 'attribute', 'value')
return EAVForm
|
<commit_before>from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'initial' not in kwargs:
kwargs['initial'] = dict()
kwargs['initial']['attribute'] = kwargs['instance'].attribute.name
super(EAVForm, self).__init__(*args, **kwargs)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
attribute = AttributeFormField()
class Meta:
model = eav_model
fields = ('entity', 'attribute', 'value')
return EAVForm
<commit_msg>Fix EAV creation form; fix form error bug<commit_after>from django import forms
from django.core.exceptions import ValidationError
from cyder.base.eav.models import Attribute
class AttributeFormField(forms.CharField):
def to_python(self, value):
try:
return Attribute.objects.get(
name=value)
except Attribute.DoesNotExist:
raise ValidationError("No such attribute")
def get_eav_form(eav_model, entity_model):
class EAVForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
if 'instance' in kwargs and kwargs['instance'] is not None:
# This is a bound form with a real instance
if 'initial' not in kwargs:
kwargs['initial'] = dict()
# Set the attribute field to the name, not the pk
kwargs['initial']['attribute'] = \
kwargs['instance'].attribute.name
super(EAVForm, self).__init__(*args, **kwargs)
entity = forms.ModelChoiceField(
queryset=entity_model.objects.all(),
widget=forms.HiddenInput())
attribute = AttributeFormField()
class Meta:
model = eav_model
fields = ('entity', 'attribute', 'value')
return EAVForm
|
b4c7a8d35d94f767154da44509a77010b585fe13
|
daiquiri/query/views.py
|
daiquiri/query/views.py
|
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView

from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta

from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
    """Main query interface; anonymous access controlled by QUERY_ANONYMOUS."""
    template_name = 'query/query.html'
    anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(LoginRequiredMixin, TemplateView):
    """Per-user query job overview; requires authentication.

    Previously used AnonymousAccessMixin, which let anonymous users reach a
    job listing that is inherently tied to a logged-in account.
    """
    template_name = 'query/jobs.html'

    def get_context_data(self, **kwargs):
        context = super(JobsView, self).get_context_data(**kwargs)
        # Expose the job phase choices so the template can render filters.
        context['phases'] = QueryJob.PHASE_CHOICES
        return context
class ExamplesView(ModelPermissionMixin, TemplateView):
    """Management view for example queries; needs the view_example permission."""
    template_name = 'query/examples.html'
    permission_required = 'daiquiri_query.view_example'
    def get_context_data(self, **kwargs):
        context = super(ExamplesView, self).get_context_data(**kwargs)
        # Field metadata lets the client render the Example edit form.
        context['meta'] = {
            'Example': get_model_field_meta(Example)
        }
        return context
|
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(LoginRequiredMixin, TemplateView):
template_name = 'query/jobs.html'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
|
Disable jobs overview for anonymous users
|
Disable jobs overview for anonymous users
|
Python
|
apache-2.0
|
aipescience/django-daiquiri,aipescience/django-daiquiri,aipescience/django-daiquiri
|
from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(AnonymousAccessMixin, TemplateView):
template_name = 'query/jobs.html'
anonymous_setting = 'QUERY_ANONYMOUS'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
Disable jobs overview for anonymous users
|
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(LoginRequiredMixin, TemplateView):
template_name = 'query/jobs.html'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
|
<commit_before>from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(AnonymousAccessMixin, TemplateView):
template_name = 'query/jobs.html'
anonymous_setting = 'QUERY_ANONYMOUS'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
<commit_msg>Disable jobs overview for anonymous users<commit_after>
|
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(LoginRequiredMixin, TemplateView):
template_name = 'query/jobs.html'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
|
from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(AnonymousAccessMixin, TemplateView):
template_name = 'query/jobs.html'
anonymous_setting = 'QUERY_ANONYMOUS'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
Disable jobs overview for anonymous usersfrom django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(LoginRequiredMixin, TemplateView):
template_name = 'query/jobs.html'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
|
<commit_before>from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(AnonymousAccessMixin, TemplateView):
template_name = 'query/jobs.html'
anonymous_setting = 'QUERY_ANONYMOUS'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
<commit_msg>Disable jobs overview for anonymous users<commit_after>from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import TemplateView
from daiquiri.core.views import ModelPermissionMixin, AnonymousAccessMixin
from daiquiri.core.utils import get_model_field_meta
from .models import QueryJob, Example
class QueryView(AnonymousAccessMixin, TemplateView):
template_name = 'query/query.html'
anonymous_setting = 'QUERY_ANONYMOUS'
class JobsView(LoginRequiredMixin, TemplateView):
template_name = 'query/jobs.html'
def get_context_data(self, **kwargs):
context = super(JobsView, self).get_context_data(**kwargs)
context['phases'] = QueryJob.PHASE_CHOICES
return context
class ExamplesView(ModelPermissionMixin, TemplateView):
template_name = 'query/examples.html'
permission_required = 'daiquiri_query.view_example'
def get_context_data(self, **kwargs):
context = super(ExamplesView, self).get_context_data(**kwargs)
context['meta'] = {
'Example': get_model_field_meta(Example)
}
return context
|
d2eae3e1d5041d5dcd967375cfe3a57079f97c22
|
builder/destroyer.py
|
builder/destroyer.py
|
import yaml
from builder import utils
def bind_subparser(subparsers):
    """Register the 'destroy' subcommand on *subparsers* and return its parser."""
    parser_destroy = subparsers.add_parser('destroy')
    # Dispatch to destroy() when this subcommand is selected.
    parser_destroy.set_defaults(func=destroy)
    return parser_destroy
def destroy(args, cloud, tracker):
    """Destroy a previously built environment.

    Replays the tracker's last block of records to find servers that were
    created but never destroyed, deletes them via *cloud*, and records each
    destruction so a re-run is idempotent.
    """
    created_servers = set()
    already_gone = set()
    for r in tracker.last_block:
        # BUG FIX: records are objects accessed by attribute (the create
        # branch already read ``r.server``); indexing with r['kind'] raised
        # TypeError.
        if r.kind == 'server_create':
            created_servers.add(r.server.name)
        if r.kind == 'server_destroy':
            already_gone.add(r.name)
    # Servers created in the last run that are still standing.
    servers = created_servers - already_gone
    if not servers:
        print("Nothing to destroy.")
    else:
        while servers:
            server = servers.pop()
            print("Destroying server %s, please wait..." % server)
            cloud.delete_server(server, wait=True)
            tracker.record({'kind': 'server_destroy', 'name': server})
    # TODO(harlowja): we should be able to remove individual creates,
    # but for now this will be the crappy way of closing off the
    # previously unfinished business.
    if tracker.status == utils.Tracker.INCOMPLETE:
        tracker.mark_end()
|
import yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
    """Destroy a previously built environment.

    Replays the tracker's last block for servers created but not yet
    destroyed, deletes them via *cloud*, and records each destruction.
    """
    created_servers = set()
    already_gone = set()
    for r in tracker.last_block:
        if r.kind == 'server_create':
            created_servers.add(r.server.name)
        if r.kind == 'server_destroy':
            already_gone.add(r.name)
    # Servers created in the last run that are still standing.
    servers = created_servers - already_gone
    if not servers:
        print("Nothing to destroy.")
    else:
        while servers:
            server = servers.pop()
            print("Destroying server %s, please wait..." % server)
            cloud.delete_server(server, wait=True)
            tracker.record({'kind': 'server_destroy', 'name': server})
    # TODO(harlowja): we should be able to remove individual creates,
    # but for now this will be the crappy way of closing off the
    # previously unfinished business.
    if tracker.status == utils.Tracker.INCOMPLETE:
        tracker.mark_end()
|
Use attr access vs key
|
Use attr access vs key
|
Python
|
apache-2.0
|
harlowja/multi-devstack,harlowja/multi-devstack
|
import yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
"""Destroy a previously built environment."""
created_servers = set()
already_gone = set()
for r in tracker.last_block:
if r['kind'] == 'server_create':
created_servers.add(r.server.name)
if r['kind'] == 'server_destroy':
already_gone.add(r.name)
servers = created_servers - already_gone
if not servers:
print("Nothing to destroy.")
else:
while servers:
server = servers.pop()
print("Destroying server %s, please wait..." % server)
cloud.delete_server(server, wait=True)
tracker.record({'kind': 'server_destroy', 'name': server})
# TODO(harlowja): we should be able to remove individual creates,
# but for now this will be the crappy way of closing off the
# previously unfinished business.
if tracker.status == utils.Tracker.INCOMPLETE:
tracker.mark_end()
Use attr access vs key
|
import yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
"""Destroy a previously built environment."""
created_servers = set()
already_gone = set()
for r in tracker.last_block:
if r.kind == 'server_create':
created_servers.add(r.server.name)
if r.kind == 'server_destroy':
already_gone.add(r.name)
servers = created_servers - already_gone
if not servers:
print("Nothing to destroy.")
else:
while servers:
server = servers.pop()
print("Destroying server %s, please wait..." % server)
cloud.delete_server(server, wait=True)
tracker.record({'kind': 'server_destroy', 'name': server})
# TODO(harlowja): we should be able to remove individual creates,
# but for now this will be the crappy way of closing off the
# previously unfinished business.
if tracker.status == utils.Tracker.INCOMPLETE:
tracker.mark_end()
|
<commit_before>import yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
"""Destroy a previously built environment."""
created_servers = set()
already_gone = set()
for r in tracker.last_block:
if r['kind'] == 'server_create':
created_servers.add(r.server.name)
if r['kind'] == 'server_destroy':
already_gone.add(r.name)
servers = created_servers - already_gone
if not servers:
print("Nothing to destroy.")
else:
while servers:
server = servers.pop()
print("Destroying server %s, please wait..." % server)
cloud.delete_server(server, wait=True)
tracker.record({'kind': 'server_destroy', 'name': server})
# TODO(harlowja): we should be able to remove individual creates,
# but for now this will be the crappy way of closing off the
# previously unfinished business.
if tracker.status == utils.Tracker.INCOMPLETE:
tracker.mark_end()
<commit_msg>Use attr access vs key<commit_after>
|
import yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
"""Destroy a previously built environment."""
created_servers = set()
already_gone = set()
for r in tracker.last_block:
if r.kind == 'server_create':
created_servers.add(r.server.name)
if r.kind == 'server_destroy':
already_gone.add(r.name)
servers = created_servers - already_gone
if not servers:
print("Nothing to destroy.")
else:
while servers:
server = servers.pop()
print("Destroying server %s, please wait..." % server)
cloud.delete_server(server, wait=True)
tracker.record({'kind': 'server_destroy', 'name': server})
# TODO(harlowja): we should be able to remove individual creates,
# but for now this will be the crappy way of closing off the
# previously unfinished business.
if tracker.status == utils.Tracker.INCOMPLETE:
tracker.mark_end()
|
import yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
"""Destroy a previously built environment."""
created_servers = set()
already_gone = set()
for r in tracker.last_block:
if r['kind'] == 'server_create':
created_servers.add(r.server.name)
if r['kind'] == 'server_destroy':
already_gone.add(r.name)
servers = created_servers - already_gone
if not servers:
print("Nothing to destroy.")
else:
while servers:
server = servers.pop()
print("Destroying server %s, please wait..." % server)
cloud.delete_server(server, wait=True)
tracker.record({'kind': 'server_destroy', 'name': server})
# TODO(harlowja): we should be able to remove individual creates,
# but for now this will be the crappy way of closing off the
# previously unfinished business.
if tracker.status == utils.Tracker.INCOMPLETE:
tracker.mark_end()
Use attr access vs keyimport yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
"""Destroy a previously built environment."""
created_servers = set()
already_gone = set()
for r in tracker.last_block:
if r.kind == 'server_create':
created_servers.add(r.server.name)
if r.kind == 'server_destroy':
already_gone.add(r.name)
servers = created_servers - already_gone
if not servers:
print("Nothing to destroy.")
else:
while servers:
server = servers.pop()
print("Destroying server %s, please wait..." % server)
cloud.delete_server(server, wait=True)
tracker.record({'kind': 'server_destroy', 'name': server})
# TODO(harlowja): we should be able to remove individual creates,
# but for now this will be the crappy way of closing off the
# previously unfinished business.
if tracker.status == utils.Tracker.INCOMPLETE:
tracker.mark_end()
|
<commit_before>import yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
"""Destroy a previously built environment."""
created_servers = set()
already_gone = set()
for r in tracker.last_block:
if r['kind'] == 'server_create':
created_servers.add(r.server.name)
if r['kind'] == 'server_destroy':
already_gone.add(r.name)
servers = created_servers - already_gone
if not servers:
print("Nothing to destroy.")
else:
while servers:
server = servers.pop()
print("Destroying server %s, please wait..." % server)
cloud.delete_server(server, wait=True)
tracker.record({'kind': 'server_destroy', 'name': server})
# TODO(harlowja): we should be able to remove individual creates,
# but for now this will be the crappy way of closing off the
# previously unfinished business.
if tracker.status == utils.Tracker.INCOMPLETE:
tracker.mark_end()
<commit_msg>Use attr access vs key<commit_after>import yaml
from builder import utils
def bind_subparser(subparsers):
parser_destroy = subparsers.add_parser('destroy')
parser_destroy.set_defaults(func=destroy)
return parser_destroy
def destroy(args, cloud, tracker):
"""Destroy a previously built environment."""
created_servers = set()
already_gone = set()
for r in tracker.last_block:
if r.kind == 'server_create':
created_servers.add(r.server.name)
if r.kind == 'server_destroy':
already_gone.add(r.name)
servers = created_servers - already_gone
if not servers:
print("Nothing to destroy.")
else:
while servers:
server = servers.pop()
print("Destroying server %s, please wait..." % server)
cloud.delete_server(server, wait=True)
tracker.record({'kind': 'server_destroy', 'name': server})
# TODO(harlowja): we should be able to remove individual creates,
# but for now this will be the crappy way of closing off the
# previously unfinished business.
if tracker.status == utils.Tracker.INCOMPLETE:
tracker.mark_end()
|
e4b1b8346834ad1ea8a176184a5b6443a2a91099
|
ddsc_incron/settings.py
|
ddsc_incron/settings.py
|
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!

# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912

# dictConfig-style logging configuration (logging.config schema version 1).
LOGGING = {
    'version': 1,
    'formatters': {
        'verbose': {
            'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
        },
    },
    'handlers': {
        # Stream handler: emits everything at DEBUG and above.
        'console': {
            'class': 'logging.StreamHandler',
            'formatter': 'verbose',
            'level': 'DEBUG',
        },
        # Null handler: silently discards records.
        'null': {
            'class': 'logging.NullHandler',
        },
    },
    'loggers': {
        # Root logger: discards everything by default; environments are
        # expected to attach real handlers via localsettings.
        '': {
            'handlers': ['null'],
            'level': 'DEBUG',
        },
    }
}

try:
    # Allow each environment to override these settings.
    from ddsc_incron.localsettings import *  # NOQA
except ImportError:
    pass
from __future__ import absolute_import
from ddsc_incron.celery import celery
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
BROKER_URL = celery.conf['BROKER_URL']
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
'rmq': {
'class': 'ddsc_logging.handlers.DDSCHandler',
'formatter': 'verbose',
'level': 'DEBUG',
'broker_url': BROKER_URL,
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
},
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
|
Add handler for logging to RabbitMQ
|
Add handler for logging to RabbitMQ
|
Python
|
mit
|
ddsc/ddsc-incron
|
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
}
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
Add handler for logging to RabbitMQ
|
from __future__ import absolute_import
from ddsc_incron.celery import celery
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
BROKER_URL = celery.conf['BROKER_URL']
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
'rmq': {
'class': 'ddsc_logging.handlers.DDSCHandler',
'formatter': 'verbose',
'level': 'DEBUG',
'broker_url': BROKER_URL,
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
},
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
|
<commit_before># Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
}
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
<commit_msg>Add handler for logging to RabbitMQ<commit_after>
|
from __future__ import absolute_import
from ddsc_incron.celery import celery
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
BROKER_URL = celery.conf['BROKER_URL']
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
'rmq': {
'class': 'ddsc_logging.handlers.DDSCHandler',
'formatter': 'verbose',
'level': 'DEBUG',
'broker_url': BROKER_URL,
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
},
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
|
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
}
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
Add handler for logging to RabbitMQfrom __future__ import absolute_import
from ddsc_incron.celery import celery
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
BROKER_URL = celery.conf['BROKER_URL']
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
'rmq': {
'class': 'ddsc_logging.handlers.DDSCHandler',
'formatter': 'verbose',
'level': 'DEBUG',
'broker_url': BROKER_URL,
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
},
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
|
<commit_before># Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
}
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
<commit_msg>Add handler for logging to RabbitMQ<commit_after>from __future__ import absolute_import
from ddsc_incron.celery import celery
# Note that logging to a single file from multiple processes is NOT supported.
# See: http://docs.python.org/2/howto/logging-cookbook.html
# #logging-to-a-single-file-from-multiple-processes
# This very much applies to ddsc-incron!
# TODO: Consider ConcurrentLogHandler on pypi when this bug is solved?
# https://bugzilla.redhat.com/show_bug.cgi?id=858912
BROKER_URL = celery.conf['BROKER_URL']
LOGGING = {
'version': 1,
'formatters': {
'verbose': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'level': 'DEBUG',
},
'null': {
'class': 'logging.NullHandler',
},
'rmq': {
'class': 'ddsc_logging.handlers.DDSCHandler',
'formatter': 'verbose',
'level': 'DEBUG',
'broker_url': BROKER_URL,
},
},
'loggers': {
'': {
'handlers': ['null'],
'level': 'DEBUG',
},
},
}
try:
# Allow each environment to override these settings.
from ddsc_incron.localsettings import * # NOQA
except ImportError:
pass
|
170d08442aeabaa5f8e2fb1fef14f9e6464eeeab
|
core/parser.py
|
core/parser.py
|
class Parser(object):
    """
    Parser class definition.

    Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
    """

    def __init__(self, config, mode=None):
        """
        Inits the object by registering the configuration object.

        :param config: configuration object; ``config.parse_default``
            supplies the parsing mode when *mode* is not given.
        :param mode: optional explicit parsing mode.
        """
        self.config = config
        # Bug fix: previously ``self.mode`` was assigned only when *mode*
        # was None, leaving the attribute undefined whenever an explicit
        # mode was passed in.
        self.mode = config.parse_default if mode is None else mode

    def parseAll(self):
        """
        Parses all the previous entries in the fail2ban log without realtime
        support.
        """
|
class Parser(object):
"""
Parser class definition.
Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
"""
def __init__(self, config, mode=None):
"""
Inits the object by registering the configuration object
"""
self.config = config
if mode is None:
self.mode = config.parse_default
self.log = open(config.log_file)
def isLog(line):
"""
Checks if a log entry is a fail2ban log entry
"""
# TODO : Change this to some regex magic ?
if "fail2ban" in line and "Ban" in line:
return True
else:
return False
def parseAll(self):
"""
Parses all the previous entries in the fail2ban log without realtime
support.
"""
entries = []
while True:
line = self.log.readline()
if self.isLog(line):
entries.append(line)
return entries
def parseRT(self):
"""
Parses the log file in realtime : only the upcoming log bans will be
"""
# TODO : maybe use a separate thread for rt parsing ?
pass
|
Add : implemented parsing method.
|
Add : implemented parsing method.
|
Python
|
mit
|
nocternology/fail2dash,nocternology/fail2dash
|
class Parser(object):
"""
Parser class definition.
Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
"""
def __init__(self, config, mode=None):
"""
Inits the object by registering the configuration object
"""
self.config = config
if mode is None:
self.mode = config.parse_default
def parseAll(self):
"""
Parses all the previous entries in the fail2ban log without realtime
support.
"""
Add : implemented parsing method.
|
class Parser(object):
"""
Parser class definition.
Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
"""
def __init__(self, config, mode=None):
"""
Inits the object by registering the configuration object
"""
self.config = config
if mode is None:
self.mode = config.parse_default
self.log = open(config.log_file)
def isLog(line):
"""
Checks if a log entry is a fail2ban log entry
"""
# TODO : Change this to some regex magic ?
if "fail2ban" in line and "Ban" in line:
return True
else:
return False
def parseAll(self):
"""
Parses all the previous entries in the fail2ban log without realtime
support.
"""
entries = []
while True:
line = self.log.readline()
if self.isLog(line):
entries.append(line)
return entries
def parseRT(self):
"""
Parses the log file in realtime : only the upcoming log bans will be
"""
# TODO : maybe use a separate thread for rt parsing ?
pass
|
<commit_before>class Parser(object):
"""
Parser class definition.
Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
"""
def __init__(self, config, mode=None):
"""
Inits the object by registering the configuration object
"""
self.config = config
if mode is None:
self.mode = config.parse_default
def parseAll(self):
"""
Parses all the previous entries in the fail2ban log without realtime
support.
"""
<commit_msg>Add : implemented parsing method.<commit_after>
|
class Parser(object):
    """
    Parser class definition.

    Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
    """

    def __init__(self, config, mode=None):
        """
        Inits the object by registering the configuration object and opening
        the log file named by ``config.log_file``.
        """
        self.config = config
        # Bug fix: previously ``self.mode`` was assigned only when *mode*
        # was None, leaving the attribute undefined for explicit modes.
        self.mode = config.parse_default if mode is None else mode
        self.log = open(config.log_file)

    def isLog(self, line):
        """
        Checks if a log entry is a fail2ban ban entry.

        Bug fix: added the missing ``self`` parameter -- the method is
        invoked as ``self.isLog(line)``, which previously raised TypeError.
        """
        # TODO : Change this to some regex magic ?
        return "fail2ban" in line and "Ban" in line

    def parseAll(self):
        """
        Parses all the previous entries in the fail2ban log without realtime
        support.

        Returns the list of matching log lines.  Bug fix: the previous
        ``while True``/``readline()`` loop never terminated, because the
        empty string returned at EOF was ignored; iterating the file object
        stops at EOF naturally.
        """
        entries = []
        for line in self.log:
            if self.isLog(line):
                entries.append(line)
        return entries

    def parseRT(self):
        """
        Parses the log file in realtime : only the upcoming log bans will be
        parsed.
        """
        # TODO : maybe use a separate thread for rt parsing ?
        pass
|
class Parser(object):
"""
Parser class definition.
Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
"""
def __init__(self, config, mode=None):
"""
Inits the object by registering the configuration object
"""
self.config = config
if mode is None:
self.mode = config.parse_default
def parseAll(self):
"""
Parses all the previous entries in the fail2ban log without realtime
support.
"""
Add : implemented parsing method.class Parser(object):
"""
Parser class definition.
Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
"""
def __init__(self, config, mode=None):
"""
Inits the object by registering the configuration object
"""
self.config = config
if mode is None:
self.mode = config.parse_default
self.log = open(config.log_file)
def isLog(line):
"""
Checks if a log entry is a fail2ban log entry
"""
# TODO : Change this to some regex magic ?
if "fail2ban" in line and "Ban" in line:
return True
else:
return False
def parseAll(self):
"""
Parses all the previous entries in the fail2ban log without realtime
support.
"""
entries = []
while True:
line = self.log.readline()
if self.isLog(line):
entries.append(line)
return entries
def parseRT(self):
"""
Parses the log file in realtime : only the upcoming log bans will be
"""
# TODO : maybe use a separate thread for rt parsing ?
pass
|
<commit_before>class Parser(object):
"""
Parser class definition.
Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
"""
def __init__(self, config, mode=None):
"""
Inits the object by registering the configuration object
"""
self.config = config
if mode is None:
self.mode = config.parse_default
def parseAll(self):
"""
Parses all the previous entries in the fail2ban log without realtime
support.
"""
<commit_msg>Add : implemented parsing method.<commit_after>class Parser(object):
"""
Parser class definition.
Takes care of parsing the fail2ban log, either in "all" mode or "realtime".
"""
def __init__(self, config, mode=None):
"""
Inits the object by registering the configuration object
"""
self.config = config
if mode is None:
self.mode = config.parse_default
self.log = open(config.log_file)
def isLog(line):
"""
Checks if a log entry is a fail2ban log entry
"""
# TODO : Change this to some regex magic ?
if "fail2ban" in line and "Ban" in line:
return True
else:
return False
def parseAll(self):
"""
Parses all the previous entries in the fail2ban log without realtime
support.
"""
entries = []
while True:
line = self.log.readline()
if self.isLog(line):
entries.append(line)
return entries
def parseRT(self):
"""
Parses the log file in realtime : only the upcoming log bans will be
"""
# TODO : maybe use a separate thread for rt parsing ?
pass
|
c72fee131a1fd3e657ea73ae98da1f5b4b021995
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
books/CrackingCodesWithPython/Chapter20/vigenereDictionaryHacker.py
|
# Vigenere Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
    """Run a dictionary attack on a hard-coded Vigenere ciphertext and copy
    any recovered plaintext to the clipboard."""
    ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
    hackedMessage = hackVigenereDictionary(ciphertext)

    if hackedMessage != None:
        print('Copying hacked message to clipboard:')
        print(hackedMessage)
        pyperclip.copy(hackedMessage)
    else:
        print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
    """Try every word in dictionary.txt as the Vigenere key for *ciphertext*.

    Decrypts with each candidate key; when the result looks like English,
    asks the user to confirm.  Returns the decrypted text the user accepts,
    or None if no key works.
    """
    fo = open('dictionary.txt')
    words = fo.readlines()
    fo.close()

    # Bug fix: the loop previously iterated over the undefined name
    # ``lines`` (NameError at runtime); the file's lines are in ``words``.
    for word in words:
        word = word.strip()  # Remove the newline at the end.
        decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
        if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
            # Check with user to see if the decrypted key has been found:
            print()
            print('Possible encryption break:')
            print('Key ' + str(word) + ': ' + decryptedText[:100])
            print()
            print('Enter D for done, or just press Enter to continue breaking:')
            response = input('> ')

            if response.upper().startswith('D'):
                return decryptedText
if __name__ == '__main__':
main()
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
Update vigenereDicitonaryHacker: added è character
|
Update vigenereDicitonaryHacker: added è character
|
Python
|
mit
|
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
|
# Vigenere Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()Update vigenereDicitonaryHacker: added è character
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
<commit_before># Vigenere Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()<commit_msg>Update vigenereDicitonaryHacker: added è character<commit_after>
|
# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
# Vigenere Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()Update vigenereDicitonaryHacker: added è character# Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
<commit_before># Vigenere Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()<commit_msg>Update vigenereDicitonaryHacker: added è character<commit_after># Vigenère Cipher Dictionary Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import detectEnglish, vigenereCipher, pyperclip
def main():
ciphertext = """Tzx isnz eccjxkg nfq lol mys bbqq I lxcz."""
hackedMessage = hackVigenereDictionary(ciphertext)
if hackedMessage != None:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackVigenereDictionary(ciphertext):
fo = open('dictionary.txt')
words = fo.readlines()
fo.close()
for word in lines:
word = word.strip() # Remove the newline at the end.
decryptedText = vigenereCipher.decryptMessage(word, ciphertext)
if detectEnglish.isEnglish(decryptedText, wordPercentage=40):
# Check with user to see if the decrypted key has been found:
print()
print('Possible encryption break:')
print('Key ' + str(word) + ': ' + decryptedText[:100])
print()
print('Enter D for done, or just press Enter to continue breaking:')
response = input('> ')
if response.upper().startswith('D'):
return decryptedText
if __name__ == '__main__':
main()
|
c6c06ab8197bfe3f007bab231536656abfcf0954
|
docs/conf.py
|
docs/conf.py
|
# -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
# -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
from unittest import mock
# Add repository root so we can import ichnaea things
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
# Fake the shapely module so things will import
sys.modules['shapely'] = mock.MagicMock()
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
Add mock for shapely module
|
Add mock for shapely module
Adding a mock for the shapely module allows ReadTheDocs to build the
docs even though Shapely isn't installed.
|
Python
|
apache-2.0
|
mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea
|
# -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
Add mock for shapely module
Adding a mock for the shapely module allows ReadTheDocs to build the
docs even though Shapely isn't installed.
|
# -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
from unittest import mock
# Add repository root so we can import ichnaea things
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
# Fake the shapely module so things will import
sys.modules['shapely'] = mock.MagicMock()
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
<commit_before># -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
<commit_msg>Add mock for shapely module
Adding a mock for the shapely module allows ReadTheDocs to build the
docs even though Shapely isn't installed.<commit_after>
|
# -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
from unittest import mock
# Add repository root so we can import ichnaea things
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
# Fake the shapely module so things will import
sys.modules['shapely'] = mock.MagicMock()
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
# -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
Add mock for shapely module
Adding a mock for the shapely module allows ReadTheDocs to build the
docs even though Shapely isn't installed.# -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
from unittest import mock
# Add repository root so we can import ichnaea things
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
# Fake the shapely module so things will import
sys.modules['shapely'] = mock.MagicMock()
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
<commit_before># -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
<commit_msg>Add mock for shapely module
Adding a mock for the shapely module allows ReadTheDocs to build the
docs even though Shapely isn't installed.<commit_after># -*- coding: utf-8 -*-
import os
import sphinx_rtd_theme
import sys
from unittest import mock
# Add repository root so we can import ichnaea things
REPO_DIR = os.path.dirname(os.path.dirname(__file__))
sys.path.append(REPO_DIR)
# Fake the shapely module so things will import
sys.modules['shapely'] = mock.MagicMock()
project = 'Ichnaea'
copyright = '2013-2019, Mozilla'
# The short X.Y version.
version = '2.0'
# The full version, including alpha/beta/rc tags.
release = '2.0'
autoclass_content = 'class'
exclude_patterns = ['build/html/README.rst', '.DS_Store', 'Thumbs.db']
html_static_path = []
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
master_doc = 'index'
modindex_common_prefix = ['ichnaea.']
pygments_style = 'sphinx'
source_suffix = '.rst'
templates_path = ['_templates']
extensions = [
'sphinx.ext.linkcode',
'everett.sphinxext',
]
def linkcode_resolve(domain, info):
if domain != 'py':
return None
if not info['module']:
return None
filename = info['module'].replace('.', '/')
return "https://github.com/mozilla/ichnaea/tree/master/%s.py" % filename
|
c6447310063a1521d83a4f7fd0b4bc548d54835b
|
test/test_get_new.py
|
test/test_get_new.py
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
@pytest.mark.trylast
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
import sys
@pytest.fixture("function")
def create_zip(request):
def teardown():
if os.path.isfile("project.zip"):
os.remove("project.zip")
request.addfinalizer(teardown)
if sys.version_info[0]==2:
empty_zip_data = 'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
else:
empty_zip_data = b'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
with open('project.zip', 'wb') as zip:
zip.write(empty_zip_data)
@pytest.mark.trylast
@needinternet
def test_check_vers_update(create_zip, fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
|
Add create_zip fixture to get_new test to cover more branches
|
Add create_zip fixture to get_new test to cover more branches
|
Python
|
lgpl-2.1
|
rlee287/pyautoupdate,rlee287/pyautoupdate
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
@pytest.mark.trylast
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
Add create_zip fixture to get_new test to cover more branches
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
import sys
@pytest.fixture("function")
def create_zip(request):
def teardown():
if os.path.isfile("project.zip"):
os.remove("project.zip")
request.addfinalizer(teardown)
if sys.version_info[0]==2:
empty_zip_data = 'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
else:
empty_zip_data = b'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
with open('project.zip', 'wb') as zip:
zip.write(empty_zip_data)
@pytest.mark.trylast
@needinternet
def test_check_vers_update(create_zip, fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
|
<commit_before>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
@pytest.mark.trylast
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
<commit_msg>Add create_zip fixture to get_new test to cover more branches<commit_after>
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
import sys
@pytest.fixture("function")
def create_zip(request):
def teardown():
if os.path.isfile("project.zip"):
os.remove("project.zip")
request.addfinalizer(teardown)
if sys.version_info[0]==2:
empty_zip_data = 'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
else:
empty_zip_data = b'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
with open('project.zip', 'wb') as zip:
zip.write(empty_zip_data)
@pytest.mark.trylast
@needinternet
def test_check_vers_update(create_zip, fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
@pytest.mark.trylast
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
Add create_zip fixture to get_new test to cover more branchesfrom __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
import sys
@pytest.fixture("function")
def create_zip(request):
def teardown():
if os.path.isfile("project.zip"):
os.remove("project.zip")
request.addfinalizer(teardown)
if sys.version_info[0]==2:
empty_zip_data = 'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
else:
empty_zip_data = b'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
with open('project.zip', 'wb') as zip:
zip.write(empty_zip_data)
@pytest.mark.trylast
@needinternet
def test_check_vers_update(create_zip, fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
|
<commit_before>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
@pytest.mark.trylast
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
<commit_msg>Add create_zip fixture to get_new test to cover more branches<commit_after>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import pytest
import os
import sys
@pytest.fixture("function")
def create_zip(request):
def teardown():
if os.path.isfile("project.zip"):
os.remove("project.zip")
request.addfinalizer(teardown)
if sys.version_info[0]==2:
empty_zip_data = 'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
else:
empty_zip_data = b'PK\x05\x06\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
with open('project.zip', 'wb') as zip:
zip.write(empty_zip_data)
@pytest.mark.trylast
@needinternet
def test_check_vers_update(create_zip, fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('filling up the boring replacements',
r'http://rlee287.github.io/pyautoupdate/testing/')
launch._get_new()
with open(os.path.abspath("downloads/extradir/blah.py"), "r") as file_code:
file_text=file_code.read()
assert "new version" in file_text
|
81246153033d38132903759cb7e33cf86c26a548
|
tests/test_attime.py
|
tests/test_attime.py
|
import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
|
import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
'10:00',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
|
Make sure HH:MM values are allowed
|
Make sure HH:MM values are allowed
|
Python
|
apache-2.0
|
michaelrice/graphite-api,alphapigger/graphite-api,Knewton/graphite-api,vladimir-smirnov-sociomantic/graphite-api,hubrick/graphite-api,GeorgeJahad/graphite-api,absalon-james/graphite-api,raintank/graphite-api,winguru/graphite-api,DaveBlooman/graphite-api,absalon-james/graphite-api,alphapigger/graphite-api,raintank/graphite-api,raintank/graphite-api,rackerlabs/graphite-api,michaelrice/graphite-api,DaveBlooman/graphite-api,Knewton/graphite-api,bogus-py/graphite-api,cybem/graphite-api-iow,vladimir-smirnov-sociomantic/graphite-api,GeorgeJahad/graphite-api,brutasse/graphite-api,tpeng/graphite-api,winguru/graphite-api,cybem/graphite-api-iow,rackerlabs/graphite-api,brutasse/graphite-api,hubrick/graphite-api,bogus-py/graphite-api,tpeng/graphite-api
|
import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
Make sure HH:MM values are allowed
|
import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
'10:00',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
|
<commit_before>import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
<commit_msg>Make sure HH:MM values are allowed<commit_after>
|
import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
'10:00',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
|
import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
Make sure HH:MM values are allowedimport datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
'10:00',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
|
<commit_before>import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
def test_parse(self):
for value in [
str(int(time.time())),
'20140319',
'20130319+1y',
'20130319+1mon',
'20130319+1w',
'12:12_20130319',
'3:05am_20130319',
'3:05pm_20130319',
'noon20130319',
'midnight20130319',
'teatime20130319',
'yesterday',
'tomorrow',
'03/19/2014',
'03/19/1800',
'03/19/1950',
'feb 27',
'mar 5',
'mon',
'tue',
'wed',
'thu',
'fri',
'sat',
'sun',
]:
self.assertIsInstance(parseATTime(value), datetime.datetime)
for value in [
'20130319+1foo',
'mar',
'wat',
]:
with self.assertRaises(Exception):
parseATTime(value)
<commit_msg>Make sure HH:MM values are allowed<commit_after>import datetime
import time
from graphite_api.render.attime import parseATTime
from . import TestCase
class AtTestCase(TestCase):
    """Tests for graphite's ``parseATTime`` at-style time parser."""

    def test_parse(self):
        # Every supported specification format should parse into a
        # datetime instance: epoch timestamps, absolute dates, relative
        # offsets, named times, weekday abbreviations and bare HH:MM.
        for value in [
            str(int(time.time())),
            '20140319',
            '20130319+1y',
            '20130319+1mon',
            '20130319+1w',
            '12:12_20130319',
            '3:05am_20130319',
            '3:05pm_20130319',
            'noon20130319',
            'midnight20130319',
            'teatime20130319',
            'yesterday',
            'tomorrow',
            '03/19/2014',
            '03/19/1800',
            '03/19/1950',
            'feb 27',
            'mar 5',
            'mon',
            'tue',
            'wed',
            'thu',
            'fri',
            'sat',
            'sun',
            '10:00',
        ]:
            self.assertIsInstance(parseATTime(value), datetime.datetime)
        # Malformed specifications must raise rather than parse silently.
        for value in [
            '20130319+1foo',
            'mar',
            'wat',
        ]:
            with self.assertRaises(Exception):
                parseATTime(value)
|
21d45e38d07a413aeeb19e10a68e540d1f6d5851
|
core/forms.py
|
core/forms.py
|
# -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from django.contrib.flatpages.admin import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
url = forms.CharField(label='', max_length=100, required=False)
sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
required=False, label='')
def __init__(self, *args, **kwargs):
super(FlatpageForm, self).__init__(*args, **kwargs)
self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
self.fields['url'].widget = HiddenInput()
self.fields['sites'].widget = MultipleHiddenInput()
def clean_url(self):
return True
def save(self, commit=True):
flatpage = super(PageForm, self).save(commit=False)
flatpage.save()
flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
return flatpage
class Meta:
widgets = {
'content': forms.widgets.Textarea(),
}
class Media:
js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
# -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from flatpages_i18n.forms import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
url = forms.CharField(label='', max_length=100, required=False)
sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
required=False, label='')
def __init__(self, *args, **kwargs):
super(FlatpageForm, self).__init__(*args, **kwargs)
self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
self.fields['url'].widget = HiddenInput()
self.fields['sites'].widget = MultipleHiddenInput()
def clean_url(self):
return True
def save(self, commit=True):
flatpage = super(PageForm, self).save(commit=False)
flatpage.save()
flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
return flatpage
class Meta:
widgets = {
'content': forms.widgets.Textarea(),
}
class Media:
js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
Remove last references to flatpage so it doesnt show up on admin page
|
Remove last references to flatpage so it doesnt show up on admin page
|
Python
|
agpl-3.0
|
tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador,tic-ull/portal-del-investigador
|
# -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from django.contrib.flatpages.admin import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
    """Admin form for flatpages whose URL is generated from the page's
    primary key instead of being typed in by the user."""

    # Kept but hidden: the real value is assigned in save().
    url = forms.CharField(label='', max_length=100, required=False)
    # Kept but hidden: the page is attached to the current site in save().
    sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
        required=False, label='')

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(FlatpageForm, ...) skips FlatpageForm's own
        # __init__ and calls its parent's — confirm this is intentional.
        super(FlatpageForm, self).__init__(*args, **kwargs)
        self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
        self.fields['url'].widget = HiddenInput()
        self.fields['sites'].widget = MultipleHiddenInput()

    def clean_url(self):
        # NOTE(review): returns True rather than a cleaned URL string; the
        # actual URL is built in save(), so field validation is effectively
        # bypassed here — confirm this is intentional.
        return True

    def save(self, commit=True):
        # NOTE(review): the ``commit`` flag is ignored — the instance is
        # always written so a primary key exists for the URL below.
        flatpage = super(PageForm, self).save(commit=False)
        flatpage.save()
        # Canonical URL embeds the freshly assigned primary key.
        flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
        flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
        return flatpage

    class Meta:
        widgets = {
            'content': forms.widgets.Textarea(),
        }

    class Media:
        # Editor assets, paths taken from project settings.
        js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
Remove last references to flatpage so it doesnt show up on admin page
|
# -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from flatpages_i18n.forms import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
    """Admin form for flatpages whose URL is generated from the page's
    primary key instead of being typed in by the user."""

    # Kept but hidden: the real value is assigned in save().
    url = forms.CharField(label='', max_length=100, required=False)
    # Kept but hidden: the page is attached to the current site in save().
    sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
        required=False, label='')

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(FlatpageForm, ...) skips FlatpageForm's own
        # __init__ and calls its parent's — confirm this is intentional.
        super(FlatpageForm, self).__init__(*args, **kwargs)
        self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
        self.fields['url'].widget = HiddenInput()
        self.fields['sites'].widget = MultipleHiddenInput()

    def clean_url(self):
        # NOTE(review): returns True rather than a cleaned URL string; the
        # actual URL is built in save(), so field validation is effectively
        # bypassed here — confirm this is intentional.
        return True

    def save(self, commit=True):
        # NOTE(review): the ``commit`` flag is ignored — the instance is
        # always written so a primary key exists for the URL below.
        flatpage = super(PageForm, self).save(commit=False)
        flatpage.save()
        # Canonical URL embeds the freshly assigned primary key.
        flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
        flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
        return flatpage

    class Meta:
        widgets = {
            'content': forms.widgets.Textarea(),
        }

    class Media:
        # Editor assets, paths taken from project settings.
        js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
<commit_before># -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from django.contrib.flatpages.admin import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
    """Admin form for flatpages whose URL is generated from the page's
    primary key instead of being typed in by the user."""

    # Kept but hidden: the real value is assigned in save().
    url = forms.CharField(label='', max_length=100, required=False)
    # Kept but hidden: the page is attached to the current site in save().
    sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
        required=False, label='')

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(FlatpageForm, ...) skips FlatpageForm's own
        # __init__ and calls its parent's — confirm this is intentional.
        super(FlatpageForm, self).__init__(*args, **kwargs)
        self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
        self.fields['url'].widget = HiddenInput()
        self.fields['sites'].widget = MultipleHiddenInput()

    def clean_url(self):
        # NOTE(review): returns True rather than a cleaned URL string; the
        # actual URL is built in save(), so field validation is effectively
        # bypassed here — confirm this is intentional.
        return True

    def save(self, commit=True):
        # NOTE(review): the ``commit`` flag is ignored — the instance is
        # always written so a primary key exists for the URL below.
        flatpage = super(PageForm, self).save(commit=False)
        flatpage.save()
        # Canonical URL embeds the freshly assigned primary key.
        flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
        flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
        return flatpage

    class Meta:
        widgets = {
            'content': forms.widgets.Textarea(),
        }

    class Media:
        # Editor assets, paths taken from project settings.
        js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
<commit_msg>Remove last references to flatpage so it doesnt show up on admin page<commit_after>
|
# -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from flatpages_i18n.forms import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
    """Admin form for flatpages whose URL is generated from the page's
    primary key instead of being typed in by the user."""

    # Kept but hidden: the real value is assigned in save().
    url = forms.CharField(label='', max_length=100, required=False)
    # Kept but hidden: the page is attached to the current site in save().
    sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
        required=False, label='')

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(FlatpageForm, ...) skips FlatpageForm's own
        # __init__ and calls its parent's — confirm this is intentional.
        super(FlatpageForm, self).__init__(*args, **kwargs)
        self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
        self.fields['url'].widget = HiddenInput()
        self.fields['sites'].widget = MultipleHiddenInput()

    def clean_url(self):
        # NOTE(review): returns True rather than a cleaned URL string; the
        # actual URL is built in save(), so field validation is effectively
        # bypassed here — confirm this is intentional.
        return True

    def save(self, commit=True):
        # NOTE(review): the ``commit`` flag is ignored — the instance is
        # always written so a primary key exists for the URL below.
        flatpage = super(PageForm, self).save(commit=False)
        flatpage.save()
        # Canonical URL embeds the freshly assigned primary key.
        flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
        flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
        return flatpage

    class Meta:
        widgets = {
            'content': forms.widgets.Textarea(),
        }

    class Media:
        # Editor assets, paths taken from project settings.
        js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
# -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from django.contrib.flatpages.admin import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
    """Admin form for flatpages whose URL is generated from the page's
    primary key instead of being typed in by the user."""

    # Kept but hidden: the real value is assigned in save().
    url = forms.CharField(label='', max_length=100, required=False)
    # Kept but hidden: the page is attached to the current site in save().
    sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
        required=False, label='')

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(FlatpageForm, ...) skips FlatpageForm's own
        # __init__ and calls its parent's — confirm this is intentional.
        super(FlatpageForm, self).__init__(*args, **kwargs)
        self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
        self.fields['url'].widget = HiddenInput()
        self.fields['sites'].widget = MultipleHiddenInput()

    def clean_url(self):
        # NOTE(review): returns True rather than a cleaned URL string; the
        # actual URL is built in save(), so field validation is effectively
        # bypassed here — confirm this is intentional.
        return True

    def save(self, commit=True):
        # NOTE(review): the ``commit`` flag is ignored — the instance is
        # always written so a primary key exists for the URL below.
        flatpage = super(PageForm, self).save(commit=False)
        flatpage.save()
        # Canonical URL embeds the freshly assigned primary key.
        flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
        flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
        return flatpage

    class Meta:
        widgets = {
            'content': forms.widgets.Textarea(),
        }

    class Media:
        # Editor assets, paths taken from project settings.
        js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
Remove last references to flatpage so it doesnt show up on admin page# -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from flatpages_i18n.forms import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
    """Admin form for flatpages whose URL is generated from the page's
    primary key instead of being typed in by the user."""

    # Kept but hidden: the real value is assigned in save().
    url = forms.CharField(label='', max_length=100, required=False)
    # Kept but hidden: the page is attached to the current site in save().
    sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
        required=False, label='')

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(FlatpageForm, ...) skips FlatpageForm's own
        # __init__ and calls its parent's — confirm this is intentional.
        super(FlatpageForm, self).__init__(*args, **kwargs)
        self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
        self.fields['url'].widget = HiddenInput()
        self.fields['sites'].widget = MultipleHiddenInput()

    def clean_url(self):
        # NOTE(review): returns True rather than a cleaned URL string; the
        # actual URL is built in save(), so field validation is effectively
        # bypassed here — confirm this is intentional.
        return True

    def save(self, commit=True):
        # NOTE(review): the ``commit`` flag is ignored — the instance is
        # always written so a primary key exists for the URL below.
        flatpage = super(PageForm, self).save(commit=False)
        flatpage.save()
        # Canonical URL embeds the freshly assigned primary key.
        flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
        flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
        return flatpage

    class Meta:
        widgets = {
            'content': forms.widgets.Textarea(),
        }

    class Media:
        # Editor assets, paths taken from project settings.
        js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
<commit_before># -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from django.contrib.flatpages.admin import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
    """Admin form for flatpages whose URL is generated from the page's
    primary key instead of being typed in by the user."""

    # Kept but hidden: the real value is assigned in save().
    url = forms.CharField(label='', max_length=100, required=False)
    # Kept but hidden: the page is attached to the current site in save().
    sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
        required=False, label='')

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(FlatpageForm, ...) skips FlatpageForm's own
        # __init__ and calls its parent's — confirm this is intentional.
        super(FlatpageForm, self).__init__(*args, **kwargs)
        self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
        self.fields['url'].widget = HiddenInput()
        self.fields['sites'].widget = MultipleHiddenInput()

    def clean_url(self):
        # NOTE(review): returns True rather than a cleaned URL string; the
        # actual URL is built in save(), so field validation is effectively
        # bypassed here — confirm this is intentional.
        return True

    def save(self, commit=True):
        # NOTE(review): the ``commit`` flag is ignored — the instance is
        # always written so a primary key exists for the URL below.
        flatpage = super(PageForm, self).save(commit=False)
        flatpage.save()
        # Canonical URL embeds the freshly assigned primary key.
        flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
        flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
        return flatpage

    class Meta:
        widgets = {
            'content': forms.widgets.Textarea(),
        }

    class Media:
        # Editor assets, paths taken from project settings.
        js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
<commit_msg>Remove last references to flatpage so it doesnt show up on admin page<commit_after># -*- encoding: UTF-8 -*-
from core import settings as stCore
from django import forms
from django.conf import settings as st
from flatpages_i18n.forms import FlatpageForm
from django.contrib.sites.models import Site
from django.forms.widgets import HiddenInput, MultipleHiddenInput
class PageForm(FlatpageForm):
    """Admin form for flatpages whose URL is generated from the page's
    primary key instead of being typed in by the user."""

    # Kept but hidden: the real value is assigned in save().
    url = forms.CharField(label='', max_length=100, required=False)
    # Kept but hidden: the page is attached to the current site in save().
    sites = forms.ModelMultipleChoiceField(queryset=Site.objects.all(),
        required=False, label='')

    def __init__(self, *args, **kwargs):
        # NOTE(review): super(FlatpageForm, ...) skips FlatpageForm's own
        # __init__ and calls its parent's — confirm this is intentional.
        super(FlatpageForm, self).__init__(*args, **kwargs)
        self.fields['url'].initial = stCore.BASE_URL_FLATPAGES
        self.fields['url'].widget = HiddenInput()
        self.fields['sites'].widget = MultipleHiddenInput()

    def clean_url(self):
        # NOTE(review): returns True rather than a cleaned URL string; the
        # actual URL is built in save(), so field validation is effectively
        # bypassed here — confirm this is intentional.
        return True

    def save(self, commit=True):
        # NOTE(review): the ``commit`` flag is ignored — the instance is
        # always written so a primary key exists for the URL below.
        flatpage = super(PageForm, self).save(commit=False)
        flatpage.save()
        # Canonical URL embeds the freshly assigned primary key.
        flatpage.url = stCore.BASE_URL_FLATPAGES + str(flatpage.id) + '/'
        flatpage.sites.add(Site.objects.get(id=st.SITE_ID))
        return flatpage

    class Meta:
        widgets = {
            'content': forms.widgets.Textarea(),
        }

    class Media:
        # Editor assets, paths taken from project settings.
        js = (st.TINYMCE_JS_URL, st.TINYMCE_JS_TEXTAREA)
|
ac0f0780beb61cab95809b2e0d02e5dab481e225
|
py/valid-parenthesis-string.py
|
py/valid-parenthesis-string.py
|
from collections import Counter
class Solution(object):
    """Backtracking solution for "Valid Parenthesis String": decide whether
    a string of '(', ')' and '*' can be balanced, with each '*' standing
    for '(', ')' or the empty string."""

    def dfs(self, s, pos, stack):
        """Try to balance ``s[pos:]`` with ``stack`` parens currently open,
        using the prefix tables built in checkValidString() for pruning."""
        # Prune: the opens we are forced to add can never all be closed.
        if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]:
            return False
        # Prune: even opening as much as possible leaves too few opens for
        # the closings that must be matched.
        if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]:
            return False
        if pos == len(s):
            # Balanced iff nothing is left open at the end.
            return not stack
        if s[pos] == '(':
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
        elif s[pos] == ')':
            if not stack:
                return False
            else:
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
        else:
            # '*': try all three interpretations, restoring stack between.
            if stack: # treat as ')'
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
            # treat as '('
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
            # treat as ''
            if self.dfs(s, pos + 1, stack):
                return True
        return False

    def checkValidString(self, s):
        """
        :type s: str
        :rtype: bool
        """
        c = Counter(s)
        # NOTE(review): mpo/mpc are computed but never used below.
        mpo, mpc = c['('] + c['*'], c[')'] + c['*']
        # Prefix counts: min/max possible opens and closes up to each
        # position of s, consumed by dfs() for pruning.
        self.max_possible_opening = [0]
        self.min_possible_opening = [0]
        self.max_possible_closing = [0]
        self.min_possible_closing = [0]
        # NOTE(review): this loop reuses the name ``c``, shadowing Counter.
        for c in s:
            self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '('))
            self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')'))
            self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')'))
            self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '('))
        return self.dfs(s, 0, 0)
|
class Solution(object):
    """Greedy O(n) time / O(1) space solution for "Valid Parenthesis
    String": '*' may act as '(', ')' or the empty string."""

    def checkValidString(self, s):
        """
        :type s: str
        :rtype: bool
        """
        # [lo, hi] bounds the count of unmatched '(' over every possible
        # interpretation of the '*' characters seen so far.
        lo = hi = 0
        for ch in s:
            if ch == '(':
                lo, hi = lo + 1, hi + 1
            elif ch == ')':
                lo, hi = max(lo - 1, 0), hi - 1
                if hi < 0:
                    # More ')' than any interpretation can match.
                    return False
            else:
                # '*': best case closes a paren, worst case opens one.
                lo, hi = max(lo - 1, 0), hi + 1
        # Valid iff some interpretation ends with zero open parens.
        return lo == 0
|
Add py solution for 678. Valid Parenthesis String
|
Add py solution for 678. Valid Parenthesis String
678. Valid Parenthesis String: https://leetcode.com/problems/valid-parenthesis-string/
Approach2:
Maintain the lowest/highest possible stack size and check if one of
them is invalid
O(n) time, O(1) size
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
from collections import Counter
class Solution(object):
    """Backtracking solution for "Valid Parenthesis String": decide whether
    a string of '(', ')' and '*' can be balanced, with each '*' standing
    for '(', ')' or the empty string."""

    def dfs(self, s, pos, stack):
        """Try to balance ``s[pos:]`` with ``stack`` parens currently open,
        using the prefix tables built in checkValidString() for pruning."""
        # Prune: the opens we are forced to add can never all be closed.
        if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]:
            return False
        # Prune: even opening as much as possible leaves too few opens for
        # the closings that must be matched.
        if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]:
            return False
        if pos == len(s):
            # Balanced iff nothing is left open at the end.
            return not stack
        if s[pos] == '(':
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
        elif s[pos] == ')':
            if not stack:
                return False
            else:
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
        else:
            # '*': try all three interpretations, restoring stack between.
            if stack: # treat as ')'
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
            # treat as '('
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
            # treat as ''
            if self.dfs(s, pos + 1, stack):
                return True
        return False

    def checkValidString(self, s):
        """
        :type s: str
        :rtype: bool
        """
        c = Counter(s)
        # NOTE(review): mpo/mpc are computed but never used below.
        mpo, mpc = c['('] + c['*'], c[')'] + c['*']
        # Prefix counts: min/max possible opens and closes up to each
        # position of s, consumed by dfs() for pruning.
        self.max_possible_opening = [0]
        self.min_possible_opening = [0]
        self.max_possible_closing = [0]
        self.min_possible_closing = [0]
        # NOTE(review): this loop reuses the name ``c``, shadowing Counter.
        for c in s:
            self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '('))
            self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')'))
            self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')'))
            self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '('))
        return self.dfs(s, 0, 0)
Add py solution for 678. Valid Parenthesis String
678. Valid Parenthesis String: https://leetcode.com/problems/valid-parenthesis-string/
Approach2:
Maintain the lowest/highest possible stack size and check if one of
them is invalid
O(n) time, O(1) size
|
class Solution(object):
    """Greedy O(n) time / O(1) space solution for "Valid Parenthesis
    String": '*' may act as '(', ')' or the empty string."""

    def checkValidString(self, s):
        """
        :type s: str
        :rtype: bool
        """
        # lowest/highest bound the number of unmatched '(' over every
        # possible interpretation of the '*' seen so far.
        lowest, highest = 0, 0
        for c in s:
            if c == '(':
                lowest += 1
                highest += 1
            elif c == ')':
                if lowest > 0:
                    lowest -= 1
                highest -= 1
                if highest < 0:
                    # More ')' than any interpretation can match.
                    return False
            else:
                # '*': best case closes a paren, worst case opens one.
                if lowest > 0:
                    lowest -= 1
                highest += 1
        # Valid iff some interpretation ends with zero open parens.
        return lowest == 0
|
<commit_before>from collections import Counter
class Solution(object):
    """Backtracking solution for "Valid Parenthesis String": decide whether
    a string of '(', ')' and '*' can be balanced, with each '*' standing
    for '(', ')' or the empty string."""

    def dfs(self, s, pos, stack):
        """Try to balance ``s[pos:]`` with ``stack`` parens currently open,
        using the prefix tables built in checkValidString() for pruning."""
        # Prune: the opens we are forced to add can never all be closed.
        if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]:
            return False
        # Prune: even opening as much as possible leaves too few opens for
        # the closings that must be matched.
        if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]:
            return False
        if pos == len(s):
            # Balanced iff nothing is left open at the end.
            return not stack
        if s[pos] == '(':
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
        elif s[pos] == ')':
            if not stack:
                return False
            else:
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
        else:
            # '*': try all three interpretations, restoring stack between.
            if stack: # treat as ')'
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
            # treat as '('
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
            # treat as ''
            if self.dfs(s, pos + 1, stack):
                return True
        return False

    def checkValidString(self, s):
        """
        :type s: str
        :rtype: bool
        """
        c = Counter(s)
        # NOTE(review): mpo/mpc are computed but never used below.
        mpo, mpc = c['('] + c['*'], c[')'] + c['*']
        # Prefix counts: min/max possible opens and closes up to each
        # position of s, consumed by dfs() for pruning.
        self.max_possible_opening = [0]
        self.min_possible_opening = [0]
        self.max_possible_closing = [0]
        self.min_possible_closing = [0]
        # NOTE(review): this loop reuses the name ``c``, shadowing Counter.
        for c in s:
            self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '('))
            self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')'))
            self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')'))
            self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '('))
        return self.dfs(s, 0, 0)
<commit_msg>Add py solution for 678. Valid Parenthesis String
678. Valid Parenthesis String: https://leetcode.com/problems/valid-parenthesis-string/
Approach2:
Maintain the lowest/highest possible stack size and check if one of
them is invalid
O(n) time, O(1) size<commit_after>
|
class Solution(object):
    """Greedy O(n) time / O(1) space solution for "Valid Parenthesis
    String": '*' may act as '(', ')' or the empty string."""

    def checkValidString(self, s):
        """
        :type s: str
        :rtype: bool
        """
        # lowest/highest bound the number of unmatched '(' over every
        # possible interpretation of the '*' seen so far.
        lowest, highest = 0, 0
        for c in s:
            if c == '(':
                lowest += 1
                highest += 1
            elif c == ')':
                if lowest > 0:
                    lowest -= 1
                highest -= 1
                if highest < 0:
                    # More ')' than any interpretation can match.
                    return False
            else:
                # '*': best case closes a paren, worst case opens one.
                if lowest > 0:
                    lowest -= 1
                highest += 1
        # Valid iff some interpretation ends with zero open parens.
        return lowest == 0
|
from collections import Counter
class Solution(object):
    """Backtracking solution for "Valid Parenthesis String": decide whether
    a string of '(', ')' and '*' can be balanced, with each '*' standing
    for '(', ')' or the empty string."""

    def dfs(self, s, pos, stack):
        """Try to balance ``s[pos:]`` with ``stack`` parens currently open,
        using the prefix tables built in checkValidString() for pruning."""
        # Prune: the opens we are forced to add can never all be closed.
        if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]:
            return False
        # Prune: even opening as much as possible leaves too few opens for
        # the closings that must be matched.
        if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]:
            return False
        if pos == len(s):
            # Balanced iff nothing is left open at the end.
            return not stack
        if s[pos] == '(':
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
        elif s[pos] == ')':
            if not stack:
                return False
            else:
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
        else:
            # '*': try all three interpretations, restoring stack between.
            if stack: # treat as ')'
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
            # treat as '('
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
            # treat as ''
            if self.dfs(s, pos + 1, stack):
                return True
        return False

    def checkValidString(self, s):
        """
        :type s: str
        :rtype: bool
        """
        c = Counter(s)
        # NOTE(review): mpo/mpc are computed but never used below.
        mpo, mpc = c['('] + c['*'], c[')'] + c['*']
        # Prefix counts: min/max possible opens and closes up to each
        # position of s, consumed by dfs() for pruning.
        self.max_possible_opening = [0]
        self.min_possible_opening = [0]
        self.max_possible_closing = [0]
        self.min_possible_closing = [0]
        # NOTE(review): this loop reuses the name ``c``, shadowing Counter.
        for c in s:
            self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '('))
            self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')'))
            self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')'))
            self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '('))
        return self.dfs(s, 0, 0)
Add py solution for 678. Valid Parenthesis String
678. Valid Parenthesis String: https://leetcode.com/problems/valid-parenthesis-string/
Approach2:
Maintain the lowest/highest possible stack size and check if one of
them is invalid
O(n) time, O(1) sizeclass Solution(object):
def checkValidString(self, s):
"""
:type s: str
:rtype: bool
"""
lowest, highest = 0, 0
for c in s:
if c == '(':
lowest += 1
highest += 1
elif c == ')':
if lowest > 0:
lowest -= 1
highest -= 1
if highest < 0:
return False
else:
if lowest > 0:
lowest -= 1
highest += 1
return lowest == 0
|
<commit_before>from collections import Counter
class Solution(object):
    """Backtracking solution for "Valid Parenthesis String": decide whether
    a string of '(', ')' and '*' can be balanced, with each '*' standing
    for '(', ')' or the empty string."""

    def dfs(self, s, pos, stack):
        """Try to balance ``s[pos:]`` with ``stack`` parens currently open,
        using the prefix tables built in checkValidString() for pruning."""
        # Prune: the opens we are forced to add can never all be closed.
        if stack + self.min_possible_opening[-1] - self.min_possible_opening[pos] > self.max_possible_closing[-1] - self.max_possible_closing[pos]:
            return False
        # Prune: even opening as much as possible leaves too few opens for
        # the closings that must be matched.
        if stack + self.max_possible_opening[-1] - self.max_possible_opening[pos] < self.min_possible_closing[-1] - self.min_possible_closing[pos]:
            return False
        if pos == len(s):
            # Balanced iff nothing is left open at the end.
            return not stack
        if s[pos] == '(':
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
        elif s[pos] == ')':
            if not stack:
                return False
            else:
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
        else:
            # '*': try all three interpretations, restoring stack between.
            if stack: # treat as ')'
                stack -= 1
                if self.dfs(s, pos + 1, stack):
                    return True
                stack += 1
            # treat as '('
            stack += 1
            if self.dfs(s, pos + 1, stack):
                return True
            stack -= 1
            # treat as ''
            if self.dfs(s, pos + 1, stack):
                return True
        return False

    def checkValidString(self, s):
        """
        :type s: str
        :rtype: bool
        """
        c = Counter(s)
        # NOTE(review): mpo/mpc are computed but never used below.
        mpo, mpc = c['('] + c['*'], c[')'] + c['*']
        # Prefix counts: min/max possible opens and closes up to each
        # position of s, consumed by dfs() for pruning.
        self.max_possible_opening = [0]
        self.min_possible_opening = [0]
        self.max_possible_closing = [0]
        self.min_possible_closing = [0]
        # NOTE(review): this loop reuses the name ``c``, shadowing Counter.
        for c in s:
            self.min_possible_opening.append(self.min_possible_opening[-1] + (c == '('))
            self.max_possible_opening.append(self.max_possible_opening[-1] + (c != ')'))
            self.min_possible_closing.append(self.min_possible_closing[-1] + (c == ')'))
            self.max_possible_closing.append(self.max_possible_closing[-1] + (c != '('))
        return self.dfs(s, 0, 0)
<commit_msg>Add py solution for 678. Valid Parenthesis String
678. Valid Parenthesis String: https://leetcode.com/problems/valid-parenthesis-string/
Approach2:
Maintain the lowest/highest possible stack size and check if one of
them is invalid
O(n) time, O(1) size<commit_after>class Solution(object):
def checkValidString(self, s):
"""
:type s: str
:rtype: bool
"""
lowest, highest = 0, 0
for c in s:
if c == '(':
lowest += 1
highest += 1
elif c == ')':
if lowest > 0:
lowest -= 1
highest -= 1
if highest < 0:
return False
else:
if lowest > 0:
lowest -= 1
highest += 1
return lowest == 0
|
9c982053f4d9c9696214d7c20ab32204d27e4a94
|
django/__init__.py
|
django/__init__.py
|
# Version tuple; last two elements are the release level and serial.
VERSION = (1, 6, 0, 'beta', 4)


def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
|
VERSION = (1, 6, 0, 'rc', 1)


def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version.

    The import happens lazily, inside the call, so that this module
    stays free of the version-formatting machinery at import time.
    """
    from django.utils.version import get_version as real_get_version
    return real_get_version(*args, **kwargs)
|
Bump version number for 1.6 release candidate.
|
[1.6.x] Bump version number for 1.6 release candidate.
|
Python
|
bsd-3-clause
|
dex4er/django,dex4er/django,django-nonrel/django,felixjimenez/django,django-nonrel/django,dex4er/django,felixjimenez/django,django-nonrel/django,django-nonrel/django,redhat-openstack/django,redhat-openstack/django,redhat-openstack/django,felixjimenez/django,redhat-openstack/django,felixjimenez/django
|
# Version tuple; last two elements are the release level and serial.
VERSION = (1, 6, 0, 'beta', 4)


def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
[1.6.x] Bump version number for 1.6 release candidate.
|
# Version tuple; last two elements are the release level and serial.
VERSION = (1, 6, 0, 'rc', 1)


def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
|
<commit_before>VERSION = (1, 6, 0, 'beta', 4)
def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
<commit_msg>[1.6.x] Bump version number for 1.6 release candidate.<commit_after>
|
# Version tuple; last two elements are the release level and serial.
VERSION = (1, 6, 0, 'rc', 1)


def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
|
# Version tuple; last two elements are the release level and serial.
VERSION = (1, 6, 0, 'beta', 4)


def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
[1.6.x] Bump version number for 1.6 release candidate.VERSION = (1, 6, 0, 'rc', 1)
def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
|
<commit_before>VERSION = (1, 6, 0, 'beta', 4)
def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
<commit_msg>[1.6.x] Bump version number for 1.6 release candidate.<commit_after>VERSION = (1, 6, 0, 'rc', 1)
def get_version(*args, **kwargs):
    """Delegate to django.utils.version.get_version (imported lazily)."""
    # Don't litter django/__init__.py with all the get_version stuff.
    # Only import if it's actually called.
    from django.utils.version import get_version
    return get_version(*args, **kwargs)
|
32637c1e9a37dc416df802805420d38f9af18d79
|
django_lightweight_queue/management/commands/queue_configuration.py
|
django_lightweight_queue/management/commands/queue_configuration.py
|
from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
    """Print the django-lightweight-queue configuration — queues with
    their worker counts and backends, middleware, and cron entries."""

    def add_arguments(self, parser):
        parser.add_argument('--config', action='store', default=None,
            help="The path to an additional django-style config file to load")

    def handle(self, **options):
        # Configuration overrides
        extra_config = options['config']
        if extra_config is not None:
            load_extra_config(extra_config)

        print("django-lightweight-queue")
        print("========================")
        print("")

        print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
        print("-" * 27)
        # One row per configured queue: name, worker count, backend class.
        for k, v in app_settings.WORKERS.items():
            print(" {0:<54} {1:<5} {2}".format(
                k,
                v,
                get_backend(k).__class__.__name__,
            ))

        print("")
        print("Middleware:")
        for x in app_settings.MIDDLEWARE:
            print(" * %s" % x)

        print("")
        print("Cron configuration")
        for x in get_cron_config():
            print("")
            # Print each known cron key, '-' when absent from the entry.
            for k in (
                'command',
                'command_args',
                'hours',
                'minutes',
                'queue',
                'timeout',
                'sigkill_on_stop',
            ):
                print("% 20s: %s" % (k, x.get(k, '-')))
|
from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
    """Print the django-lightweight-queue configuration — queues with
    their worker counts and backends, middleware, and cron entries."""

    def add_arguments(self, parser):
        parser.add_argument('--config', action='store', default=None,
            help="The path to an additional django-style config file to load")

    def handle(self, **options):
        # Configuration overrides
        extra_config = options['config']
        if extra_config is not None:
            load_extra_config(extra_config)

        print("django-lightweight-queue")
        print("========================")
        print("")

        print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
        print("-" * 27)
        # One row per configured queue: name, worker count, backend class.
        # sorted() keeps the listing stable and alphabetical.
        for k, v in sorted(app_settings.WORKERS.items()):
            print(" {0:<54} {1:<5} {2}".format(
                k,
                v,
                get_backend(k).__class__.__name__,
            ))

        print("")
        print("Middleware:")
        for x in app_settings.MIDDLEWARE:
            print(" * %s" % x)

        print("")
        print("Cron configuration")
        for x in get_cron_config():
            print("")
            # Print each known cron key, '-' when absent from the entry.
            for k in (
                'command',
                'command_args',
                'hours',
                'minutes',
                'queue',
                'timeout',
                'sigkill_on_stop',
            ):
                print("% 20s: %s" % (k, x.get(k, '-')))
|
Print the discovered queues in alphabetical order for convenience
|
Print the discovered queues in alphabetical order for convenience
|
Python
|
bsd-3-clause
|
thread/django-lightweight-queue,thread/django-lightweight-queue
|
from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
    """Print the django-lightweight-queue configuration — queues with
    their worker counts and backends, middleware, and cron entries."""

    def add_arguments(self, parser):
        parser.add_argument('--config', action='store', default=None,
            help="The path to an additional django-style config file to load")

    def handle(self, **options):
        # Configuration overrides
        extra_config = options['config']
        if extra_config is not None:
            load_extra_config(extra_config)

        print("django-lightweight-queue")
        print("========================")
        print("")

        print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
        print("-" * 27)
        # One row per configured queue: name, worker count, backend class.
        for k, v in app_settings.WORKERS.items():
            print(" {0:<54} {1:<5} {2}".format(
                k,
                v,
                get_backend(k).__class__.__name__,
            ))

        print("")
        print("Middleware:")
        for x in app_settings.MIDDLEWARE:
            print(" * %s" % x)

        print("")
        print("Cron configuration")
        for x in get_cron_config():
            print("")
            # Print each known cron key, '-' when absent from the entry.
            for k in (
                'command',
                'command_args',
                'hours',
                'minutes',
                'queue',
                'timeout',
                'sigkill_on_stop',
            ):
                print("% 20s: %s" % (k, x.get(k, '-')))
Print the discovered queues in alphabetical order for convenience
|
from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
    """Print the django-lightweight-queue configuration — queues with
    their worker counts and backends, middleware, and cron entries."""

    def add_arguments(self, parser):
        parser.add_argument('--config', action='store', default=None,
            help="The path to an additional django-style config file to load")

    def handle(self, **options):
        # Configuration overrides
        extra_config = options['config']
        if extra_config is not None:
            load_extra_config(extra_config)

        print("django-lightweight-queue")
        print("========================")
        print("")

        print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
        print("-" * 27)
        # One row per configured queue: name, worker count, backend class.
        # sorted() keeps the listing stable and alphabetical.
        for k, v in sorted(app_settings.WORKERS.items()):
            print(" {0:<54} {1:<5} {2}".format(
                k,
                v,
                get_backend(k).__class__.__name__,
            ))

        print("")
        print("Middleware:")
        for x in app_settings.MIDDLEWARE:
            print(" * %s" % x)

        print("")
        print("Cron configuration")
        for x in get_cron_config():
            print("")
            # Print each known cron key, '-' when absent from the entry.
            for k in (
                'command',
                'command_args',
                'hours',
                'minutes',
                'queue',
                'timeout',
                'sigkill_on_stop',
            ):
                print("% 20s: %s" % (k, x.get(k, '-')))
|
<commit_before>from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
    """Print the django-lightweight-queue configuration — queues with
    their worker counts and backends, middleware, and cron entries."""

    def add_arguments(self, parser):
        parser.add_argument('--config', action='store', default=None,
            help="The path to an additional django-style config file to load")

    def handle(self, **options):
        # Configuration overrides
        extra_config = options['config']
        if extra_config is not None:
            load_extra_config(extra_config)

        print("django-lightweight-queue")
        print("========================")
        print("")

        print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
        print("-" * 27)
        # One row per configured queue: name, worker count, backend class.
        for k, v in app_settings.WORKERS.items():
            print(" {0:<54} {1:<5} {2}".format(
                k,
                v,
                get_backend(k).__class__.__name__,
            ))

        print("")
        print("Middleware:")
        for x in app_settings.MIDDLEWARE:
            print(" * %s" % x)

        print("")
        print("Cron configuration")
        for x in get_cron_config():
            print("")
            # Print each known cron key, '-' when absent from the entry.
            for k in (
                'command',
                'command_args',
                'hours',
                'minutes',
                'queue',
                'timeout',
                'sigkill_on_stop',
            ):
                print("% 20s: %s" % (k, x.get(k, '-')))
<commit_msg>Print the discovered queues in alphabetical order for convenience<commit_after>
|
from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--config', action='store', default=None,
help="The path to an additional django-style config file to load")
def handle(self, **options):
# Configuration overrides
extra_config = options['config']
if extra_config is not None:
load_extra_config(extra_config)
print("django-lightweight-queue")
print("========================")
print("")
print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
print("-" * 27)
for k, v in sorted(app_settings.WORKERS.items()):
print(" {0:<54} {1:<5} {2}".format(
k,
v,
get_backend(k).__class__.__name__,
))
print("")
print("Middleware:")
for x in app_settings.MIDDLEWARE:
print(" * %s" % x)
print("")
print("Cron configuration")
for x in get_cron_config():
print("")
for k in (
'command',
'command_args',
'hours',
'minutes',
'queue',
'timeout',
'sigkill_on_stop',
):
print("% 20s: %s" % (k, x.get(k, '-')))
|
from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--config', action='store', default=None,
help="The path to an additional django-style config file to load")
def handle(self, **options):
# Configuration overrides
extra_config = options['config']
if extra_config is not None:
load_extra_config(extra_config)
print("django-lightweight-queue")
print("========================")
print("")
print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
print("-" * 27)
for k, v in app_settings.WORKERS.items():
print(" {0:<54} {1:<5} {2}".format(
k,
v,
get_backend(k).__class__.__name__,
))
print("")
print("Middleware:")
for x in app_settings.MIDDLEWARE:
print(" * %s" % x)
print("")
print("Cron configuration")
for x in get_cron_config():
print("")
for k in (
'command',
'command_args',
'hours',
'minutes',
'queue',
'timeout',
'sigkill_on_stop',
):
print("% 20s: %s" % (k, x.get(k, '-')))
Print the discovered queues in alphabetical order for conveniencefrom django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--config', action='store', default=None,
help="The path to an additional django-style config file to load")
def handle(self, **options):
# Configuration overrides
extra_config = options['config']
if extra_config is not None:
load_extra_config(extra_config)
print("django-lightweight-queue")
print("========================")
print("")
print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
print("-" * 27)
for k, v in sorted(app_settings.WORKERS.items()):
print(" {0:<54} {1:<5} {2}".format(
k,
v,
get_backend(k).__class__.__name__,
))
print("")
print("Middleware:")
for x in app_settings.MIDDLEWARE:
print(" * %s" % x)
print("")
print("Cron configuration")
for x in get_cron_config():
print("")
for k in (
'command',
'command_args',
'hours',
'minutes',
'queue',
'timeout',
'sigkill_on_stop',
):
print("% 20s: %s" % (k, x.get(k, '-')))
|
<commit_before>from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--config', action='store', default=None,
help="The path to an additional django-style config file to load")
def handle(self, **options):
# Configuration overrides
extra_config = options['config']
if extra_config is not None:
load_extra_config(extra_config)
print("django-lightweight-queue")
print("========================")
print("")
print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
print("-" * 27)
for k, v in app_settings.WORKERS.items():
print(" {0:<54} {1:<5} {2}".format(
k,
v,
get_backend(k).__class__.__name__,
))
print("")
print("Middleware:")
for x in app_settings.MIDDLEWARE:
print(" * %s" % x)
print("")
print("Cron configuration")
for x in get_cron_config():
print("")
for k in (
'command',
'command_args',
'hours',
'minutes',
'queue',
'timeout',
'sigkill_on_stop',
):
print("% 20s: %s" % (k, x.get(k, '-')))
<commit_msg>Print the discovered queues in alphabetical order for convenience<commit_after>from django.core.management.base import BaseCommand
from ... import app_settings
from ...utils import get_backend, load_extra_config
from ...cron_scheduler import get_cron_config
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('--config', action='store', default=None,
help="The path to an additional django-style config file to load")
def handle(self, **options):
# Configuration overrides
extra_config = options['config']
if extra_config is not None:
load_extra_config(extra_config)
print("django-lightweight-queue")
print("========================")
print("")
print("{0:<55} {1:<5} {2}".format("Queue name", "Concurrency", "Backend"))
print("-" * 27)
for k, v in sorted(app_settings.WORKERS.items()):
print(" {0:<54} {1:<5} {2}".format(
k,
v,
get_backend(k).__class__.__name__,
))
print("")
print("Middleware:")
for x in app_settings.MIDDLEWARE:
print(" * %s" % x)
print("")
print("Cron configuration")
for x in get_cron_config():
print("")
for k in (
'command',
'command_args',
'hours',
'minutes',
'queue',
'timeout',
'sigkill_on_stop',
):
print("% 20s: %s" % (k, x.get(k, '-')))
|
e1e7b72685df12d1d7d782e03878253663a4c790
|
mysite/scripts/remove_numbers_from_locations.py
|
mysite/scripts/remove_numbers_from_locations.py
|
import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d', p.location_display_name)
unweirded_location = "".join(location_pieces)
p.location_display_name = unweirded_location
p.save()
|
import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d,', p.location_display_name)
unweirded_location = ",".join(location_pieces)
if unweirded_location != p.location_display_name:
print "weird location: " + p.location_display_name
p.location_display_name = unweirded_location
print "unweirded location: " + p.location_display_name
p.save()
|
Fix script that removes numerals from people's locations.
|
Fix script that removes numerals from people's locations.
|
Python
|
agpl-3.0
|
campbe13/openhatch,heeraj123/oh-mainline,moijes12/oh-mainline,ojengwa/oh-mainline,Changaco/oh-mainline,vipul-sharma20/oh-mainline,campbe13/openhatch,SnappleCap/oh-mainline,SnappleCap/oh-mainline,waseem18/oh-mainline,eeshangarg/oh-mainline,moijes12/oh-mainline,waseem18/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,ojengwa/oh-mainline,sudheesh001/oh-mainline,heeraj123/oh-mainline,jledbetter/openhatch,onceuponatimeforever/oh-mainline,willingc/oh-mainline,willingc/oh-mainline,eeshangarg/oh-mainline,sudheesh001/oh-mainline,vipul-sharma20/oh-mainline,mzdaniel/oh-mainline,willingc/oh-mainline,eeshangarg/oh-mainline,nirmeshk/oh-mainline,moijes12/oh-mainline,SnappleCap/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,SnappleCap/oh-mainline,jledbetter/openhatch,openhatch/oh-mainline,eeshangarg/oh-mainline,openhatch/oh-mainline,waseem18/oh-mainline,Changaco/oh-mainline,jledbetter/openhatch,ehashman/oh-mainline,openhatch/oh-mainline,willingc/oh-mainline,ehashman/oh-mainline,jledbetter/openhatch,onceuponatimeforever/oh-mainline,moijes12/oh-mainline,mzdaniel/oh-mainline,vipul-sharma20/oh-mainline,sudheesh001/oh-mainline,heeraj123/oh-mainline,openhatch/oh-mainline,ehashman/oh-mainline,mzdaniel/oh-mainline,eeshangarg/oh-mainline,SnappleCap/oh-mainline,nirmeshk/oh-mainline,mzdaniel/oh-mainline,Changaco/oh-mainline,waseem18/oh-mainline,vipul-sharma20/oh-mainline,openhatch/oh-mainline,campbe13/openhatch,Changaco/oh-mainline,ojengwa/oh-mainline,onceuponatimeforever/oh-mainline,sudheesh001/oh-mainline,onceuponatimeforever/oh-mainline,ehashman/oh-mainline,nirmeshk/oh-mainline,nirmeshk/oh-mainline,moijes12/oh-mainline,Changaco/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,jledbetter/openhatch,campbe13/openhatch,ehashman/oh-mainline,willingc/oh-mainline,heeraj123/oh-mainline,mzdaniel/oh-mainline,ojengwa/oh-mainline,waseem18/oh-mainline,ojengwa/oh-mainline,nirmeshk/oh-mainline,campbe13/openhatch
|
import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d', p.location_display_name)
unweirded_location = "".join(location_pieces)
p.location_display_name = unweirded_location
p.save()
Fix script that removes numerals from people's locations.
|
import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d,', p.location_display_name)
unweirded_location = ",".join(location_pieces)
if unweirded_location != p.location_display_name:
print "weird location: " + p.location_display_name
p.location_display_name = unweirded_location
print "unweirded location: " + p.location_display_name
p.save()
|
<commit_before>import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d', p.location_display_name)
unweirded_location = "".join(location_pieces)
p.location_display_name = unweirded_location
p.save()
<commit_msg>Fix script that removes numerals from people's locations.<commit_after>
|
import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d,', p.location_display_name)
unweirded_location = ",".join(location_pieces)
if unweirded_location != p.location_display_name:
print "weird location: " + p.location_display_name
p.location_display_name = unweirded_location
print "unweirded location: " + p.location_display_name
p.save()
|
import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d', p.location_display_name)
unweirded_location = "".join(location_pieces)
p.location_display_name = unweirded_location
p.save()
Fix script that removes numerals from people's locations.import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d,', p.location_display_name)
unweirded_location = ",".join(location_pieces)
if unweirded_location != p.location_display_name:
print "weird location: " + p.location_display_name
p.location_display_name = unweirded_location
print "unweirded location: " + p.location_display_name
p.save()
|
<commit_before>import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d', p.location_display_name)
unweirded_location = "".join(location_pieces)
p.location_display_name = unweirded_location
p.save()
<commit_msg>Fix script that removes numerals from people's locations.<commit_after>import re
import mysite
Person = mysite.profile.models.Person
people_with_weird_locations = Person.objects.filter(location_display_name__regex=', [0-9][0-9],')
for p in people_with_weird_locations:
location_pieces = re.split(r', \d\d,', p.location_display_name)
unweirded_location = ",".join(location_pieces)
if unweirded_location != p.location_display_name:
print "weird location: " + p.location_display_name
p.location_display_name = unweirded_location
print "unweirded location: " + p.location_display_name
p.save()
|
b23c843fda57e0ffa56aaf430d9a590e2ed0ec9a
|
ch06/extract_airlines.py
|
ch06/extract_airlines.py
|
# Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
# Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x is not None and x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
Check variable for None value before null string when filtering tail numbers
|
Check variable for None value before null string when filtering tail numbers
|
Python
|
mit
|
rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2,rjurney/Agile_Data_Code_2,naoyak/Agile_Data_Code_2
|
# Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
Check variable for None value before null string when filtering tail numbers
|
# Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x is not None and x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
<commit_before># Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
<commit_msg>Check variable for None value before null string when filtering tail numbers<commit_after>
|
# Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x is not None and x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
# Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
Check variable for None value before null string when filtering tail numbers# Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x is not None and x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
<commit_before># Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
<commit_msg>Check variable for None value before null string when filtering tail numbers<commit_after># Load the on-time parquet file
on_time_dataframe = spark.read.parquet('data/on_time_performance.parquet')
# The first step is easily expressed as SQL: get all unique tail numbers for each airline
on_time_dataframe.registerTempTable("on_time_performance")
carrier_airplane = spark.sql(
"SELECT DISTINCT Carrier, TailNum FROM on_time_performance"
)
# Now we need to store a sorted group for each Carrier, along with a fleet count
airplanes_per_carrier = carrier_airplane.rdd\
.map(lambda nameTuple: (nameTuple[0], [nameTuple[1]]))\
.reduceByKey(lambda a, b: a + b)\
.map(lambda tuple:
{
'Carrier': tuple[0],
'TailNumbers': sorted(
filter(
lambda x: x is not None and x != '', tuple[1] # empty string tail numbers were getting through
)
),
'FleetCount': len(tuple[1])
}
)
airplanes_per_carrier.count() # 14
# Save to Mongo in the airplanes_per_carrier relation
import pymongo_spark
pymongo_spark.activate()
airplanes_per_carrier.saveToMongoDB(
'mongodb://localhost:27017/agile_data_science.airplanes_per_carrier'
)
|
dbc09d03f62bf2d5ee1661492a4c20a7942f81a9
|
tests/basics/list_slice.py
|
tests/basics/list_slice.py
|
# test slices; only 2 argument version supported by Micro Python at the moment
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
#print(x[::c])
print(x[:b])
print(x[:b:])
#print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
#print(x[a::c])
print(x[a:b])
print(x[a:b:])
#print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
|
# test list slices, getting values
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
print(x[::c])
print(x[:b])
print(x[:b:])
print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
print(x[a::c])
print(x[a:b])
print(x[a:b:])
print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
try:
[][::0]
except ValueError:
print('ValueError')
|
Enable tests for list slice getting with 3rd arg.
|
tests/basics: Enable tests for list slice getting with 3rd arg.
Also add a test to check case when 3rd arg is 0.
|
Python
|
mit
|
tuc-osg/micropython,mhoffma/micropython,trezor/micropython,blazewicz/micropython,AriZuu/micropython,kerneltask/micropython,swegener/micropython,MrSurly/micropython,mhoffma/micropython,hiway/micropython,alex-robbins/micropython,henriknelson/micropython,tuc-osg/micropython,adafruit/micropython,selste/micropython,ryannathans/micropython,tobbad/micropython,kerneltask/micropython,cwyark/micropython,bvernoux/micropython,SHA2017-badge/micropython-esp32,tobbad/micropython,oopy/micropython,chrisdearman/micropython,hiway/micropython,bvernoux/micropython,tralamazza/micropython,ryannathans/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython-esp32,tralamazza/micropython,chrisdearman/micropython,TDAbboud/micropython,adafruit/circuitpython,puuu/micropython,lowRISC/micropython,torwag/micropython,MrSurly/micropython,kerneltask/micropython,mhoffma/micropython,tobbad/micropython,puuu/micropython,alex-robbins/micropython,puuu/micropython,henriknelson/micropython,HenrikSolver/micropython,tuc-osg/micropython,Timmenem/micropython,pfalcon/micropython,toolmacher/micropython,infinnovation/micropython,mhoffma/micropython,selste/micropython,adafruit/micropython,pozetroninc/micropython,torwag/micropython,HenrikSolver/micropython,pramasoul/micropython,Peetz0r/micropython-esp32,swegener/micropython,pramasoul/micropython,AriZuu/micropython,PappaPeppar/micropython,oopy/micropython,TDAbboud/micropython,adafruit/micropython,bvernoux/micropython,lowRISC/micropython,blazewicz/micropython,pramasoul/micropython,pozetroninc/micropython,tuc-osg/micropython,hiway/micropython,Timmenem/micropython,blazewicz/micropython,trezor/micropython,Timmenem/micropython,selste/micropython,tralamazza/micropython,infinnovation/micropython,tobbad/micropython,deshipu/micropython,deshipu/micropython,torwag/micropython,lowRISC/micropython,dmazzella/micropython,TDAbboud/micropython,swegener/micropython,deshipu/micropython,mhoffma/micropython,selste/micropython,MrSurly/micropython-esp32,Peetz0r/micropython-esp32,adaf
ruit/circuitpython,SHA2017-badge/micropython-esp32,tuc-osg/micropython,MrSurly/micropython,tobbad/micropython,selste/micropython,swegener/micropython,tralamazza/micropython,deshipu/micropython,oopy/micropython,henriknelson/micropython,Timmenem/micropython,lowRISC/micropython,kerneltask/micropython,Peetz0r/micropython-esp32,adafruit/circuitpython,henriknelson/micropython,cwyark/micropython,blazewicz/micropython,lowRISC/micropython,puuu/micropython,PappaPeppar/micropython,MrSurly/micropython,adafruit/micropython,MrSurly/micropython-esp32,PappaPeppar/micropython,PappaPeppar/micropython,SHA2017-badge/micropython-esp32,trezor/micropython,chrisdearman/micropython,hiway/micropython,infinnovation/micropython,puuu/micropython,dmazzella/micropython,blazewicz/micropython,henriknelson/micropython,pramasoul/micropython,HenrikSolver/micropython,micropython/micropython-esp32,chrisdearman/micropython,toolmacher/micropython,toolmacher/micropython,Timmenem/micropython,pozetroninc/micropython,chrisdearman/micropython,MrSurly/micropython-esp32,SHA2017-badge/micropython-esp32,micropython/micropython-esp32,TDAbboud/micropython,PappaPeppar/micropython,deshipu/micropython,ryannathans/micropython,infinnovation/micropython,pfalcon/micropython,Peetz0r/micropython-esp32,micropython/micropython-esp32,trezor/micropython,infinnovation/micropython,torwag/micropython,ryannathans/micropython,AriZuu/micropython,pozetroninc/micropython,adafruit/micropython,trezor/micropython,HenrikSolver/micropython,bvernoux/micropython,cwyark/micropython,alex-robbins/micropython,ryannathans/micropython,alex-robbins/micropython,HenrikSolver/micropython,pramasoul/micropython,adafruit/circuitpython,toolmacher/micropython,toolmacher/micropython,pfalcon/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,pfalcon/micropython,MrSurly/micropython,pfalcon/micropython,cwyark/micropython,torwag/micropython,AriZuu/micropython,bvernoux/micropython,oopy/micropython,hiway/micropython,TDAbboud/micropython,dmazzella/m
icropython,alex-robbins/micropython,adafruit/circuitpython,oopy/micropython,pozetroninc/micropython,dmazzella/micropython,swegener/micropython,micropython/micropython-esp32,cwyark/micropython,AriZuu/micropython,adafruit/circuitpython,Peetz0r/micropython-esp32,kerneltask/micropython
|
# test slices; only 2 argument version supported by Micro Python at the moment
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
#print(x[::c])
print(x[:b])
print(x[:b:])
#print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
#print(x[a::c])
print(x[a:b])
print(x[a:b:])
#print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
tests/basics: Enable tests for list slice getting with 3rd arg.
Also add a test to check case when 3rd arg is 0.
|
# test list slices, getting values
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
print(x[::c])
print(x[:b])
print(x[:b:])
print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
print(x[a::c])
print(x[a:b])
print(x[a:b:])
print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
try:
[][::0]
except ValueError:
print('ValueError')
|
<commit_before># test slices; only 2 argument version supported by Micro Python at the moment
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
#print(x[::c])
print(x[:b])
print(x[:b:])
#print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
#print(x[a::c])
print(x[a:b])
print(x[a:b:])
#print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
<commit_msg>tests/basics: Enable tests for list slice getting with 3rd arg.
Also add a test to check case when 3rd arg is 0.<commit_after>
|
# test list slices, getting values
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
print(x[::c])
print(x[:b])
print(x[:b:])
print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
print(x[a::c])
print(x[a:b])
print(x[a:b:])
print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
try:
[][::0]
except ValueError:
print('ValueError')
|
# test slices; only 2 argument version supported by Micro Python at the moment
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
#print(x[::c])
print(x[:b])
print(x[:b:])
#print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
#print(x[a::c])
print(x[a:b])
print(x[a:b:])
#print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
tests/basics: Enable tests for list slice getting with 3rd arg.
Also add a test to check case when 3rd arg is 0.# test list slices, getting values
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
print(x[::c])
print(x[:b])
print(x[:b:])
print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
print(x[a::c])
print(x[a:b])
print(x[a:b:])
print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
try:
[][::0]
except ValueError:
print('ValueError')
|
<commit_before># test slices; only 2 argument version supported by Micro Python at the moment
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
#print(x[::c])
print(x[:b])
print(x[:b:])
#print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
#print(x[a::c])
print(x[a:b])
print(x[a:b:])
#print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
<commit_msg>tests/basics: Enable tests for list slice getting with 3rd arg.
Also add a test to check case when 3rd arg is 0.<commit_after># test list slices, getting values
x = list(range(10))
a = 2
b = 4
c = 3
print(x[:])
print(x[::])
print(x[::c])
print(x[:b])
print(x[:b:])
print(x[:b:c])
print(x[a])
print(x[a:])
print(x[a::])
print(x[a::c])
print(x[a:b])
print(x[a:b:])
print(x[a:b:c])
# these should not raise IndexError
print([][1:])
print([][-1:])
try:
[][::0]
except ValueError:
print('ValueError')
|
351c05b6e474b266a7594a775cb48cd7cfe0b833
|
shapely/linref.py
|
shapely/linref.py
|
"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
try:
assert ob.geom_type in ['LineString', 'MultiLineString']
except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
Allow linear referencing on rings.
|
Allow linear referencing on rings.
Closes #286.
Eliminating the assert is good for optimization reasons, too.
|
Python
|
bsd-3-clause
|
abali96/Shapely,mouadino/Shapely,mindw/shapely,abali96/Shapely,jdmcbr/Shapely,jdmcbr/Shapely,mindw/shapely,mouadino/Shapely
|
"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
try:
assert ob.geom_type in ['LineString', 'MultiLineString']
except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
Allow linear referencing on rings.
Closes #286.
Eliminating the assert is good for optimization reasons, too.
|
"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
<commit_before>"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
try:
assert ob.geom_type in ['LineString', 'MultiLineString']
except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
<commit_msg>Allow linear referencing on rings.
Closes #286.
Eliminating the assert is good for optimization reasons, too.<commit_after>
|
"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
try:
assert ob.geom_type in ['LineString', 'MultiLineString']
except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
Allow linear referencing on rings.
Closes #286.
Eliminating the assert is good for optimization reasons, too."""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
<commit_before>"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
try:
assert ob.geom_type in ['LineString', 'MultiLineString']
except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
<commit_msg>Allow linear referencing on rings.
Closes #286.
Eliminating the assert is good for optimization reasons, too.<commit_after>"""Linear referencing
"""
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
9818fb927bcc096fbb6a3b075be867a5709c3d0f
|
simpy/__init__.py
|
simpy/__init__.py
|
# encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
|
# encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Process, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
|
Add Process to the simpy namespace.
|
Add Process to the simpy namespace.
|
Python
|
mit
|
Uzere/uSim
|
# encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
Add Process to the simpy namespace.
|
# encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Process, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
|
<commit_before># encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
<commit_msg>Add Process to the simpy namespace.<commit_after>
|
# encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Process, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
|
# encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
Add Process to the simpy namespace.# encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Process, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
|
<commit_before># encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
<commit_msg>Add Process to the simpy namespace.<commit_after># encoding: utf-8
"""
With SimPy, simulating is fun again!
"""
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
from simpy.core import Simulation, Process, Interrupt, Failure
__all__ = ['Simulation', 'Interrupt', 'Failure', 'test']
__version__ = '3.0a1'
def test():
"""Runs SimPy’s test suite via *py.test*."""
import os.path
try:
import mock
import pytest
except ImportError:
print('You need pytest and mock to run the tests. '
'Try "pip install pytest mock".')
else:
pytest.main([os.path.dirname(__file__)])
|
6f6e16cfabb7c3ff3f634718b16f87bd7705d284
|
tests/v7/test_item_list.py
|
tests/v7/test_item_list.py
|
from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
|
from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
item_list_2 = ItemList(values)
assert item_list == item_list_2
item_list_3 = ItemList([1, 5, 8, 3])
assert item_list != item_list_3
|
Add a couple more test cases for item list
|
Add a couple more test cases for item list
|
Python
|
mit
|
maxalbert/tohu
|
from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
Add a couple more test cases for item list
|
from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
item_list_2 = ItemList(values)
assert item_list == item_list_2
item_list_3 = ItemList([1, 5, 8, 3])
assert item_list != item_list_3
|
<commit_before>from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
<commit_msg>Add a couple more test cases for item list<commit_after>
|
from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
item_list_2 = ItemList(values)
assert item_list == item_list_2
item_list_3 = ItemList([1, 5, 8, 3])
assert item_list != item_list_3
|
from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
Add a couple more test cases for item listfrom .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
item_list_2 = ItemList(values)
assert item_list == item_list_2
item_list_3 = ItemList([1, 5, 8, 3])
assert item_list != item_list_3
|
<commit_before>from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
<commit_msg>Add a couple more test cases for item list<commit_after>from .context import tohu
from tohu.v7.item_list import ItemList
def test_item_list():
values = [11, 55, 22, 66, 33]
item_list = ItemList(values)
assert item_list.items == values
assert item_list == values
assert len(item_list) == 5
assert item_list[3] == 66
assert [x for x in item_list] == values
item_list_2 = ItemList(values)
assert item_list == item_list_2
item_list_3 = ItemList([1, 5, 8, 3])
assert item_list != item_list_3
|
70b037496140dd2e9e6d71508835390f0c85bc02
|
skltn/metadata.py
|
skltn/metadata.py
|
# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = ['John Doe']
authors_string = ', '.join(authors)
emails = ['foobar@example.com', 'johndoe@thisisfake.org']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://example.com/'
|
# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
import subprocess
def get_author_detail(arg='name'):
p = subprocess.Popen(['git', 'config', 'user.{}'.format(arg)],
stdout=subprocess.PIPE)
try:
out, _ = p.communicate()
except:
out = ''
return out.strip() or None
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = [get_author_detail('name') or 'John Doe']
authors_string = ', '.join(authors)
emails = [get_author_detail('email') or 'doe@example.org']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://example.com/'
|
Change year to 2016, try to guess author details from git config
|
Change year to 2016, try to guess author details from git config
|
Python
|
mit
|
ksonj/skltn
|
# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = ['John Doe']
authors_string = ', '.join(authors)
emails = ['foobar@example.com', 'johndoe@thisisfake.org']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://example.com/'
Change year to 2016, try to guess author details from git config
|
# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
import subprocess
def get_author_detail(arg='name'):
p = subprocess.Popen(['git', 'config', 'user.{}'.format(arg)],
stdout=subprocess.PIPE)
try:
out, _ = p.communicate()
except:
out = ''
return out.strip() or None
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = [get_author_detail('name') or 'John Doe']
authors_string = ', '.join(authors)
emails = [get_author_detail('email') or 'doe@example.org']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://example.com/'
|
<commit_before># -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = ['John Doe']
authors_string = ', '.join(authors)
emails = ['foobar@example.com', 'johndoe@thisisfake.org']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://example.com/'
<commit_msg>Change year to 2016, try to guess author details from git config<commit_after>
|
# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
import subprocess
def get_author_detail(arg='name'):
p = subprocess.Popen(['git', 'config', 'user.{}'.format(arg)],
stdout=subprocess.PIPE)
try:
out, _ = p.communicate()
except:
out = ''
return out.strip() or None
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = [get_author_detail('name') or 'John Doe']
authors_string = ', '.join(authors)
emails = [get_author_detail('email') or 'doe@example.org']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://example.com/'
|
# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = ['John Doe']
authors_string = ', '.join(authors)
emails = ['foobar@example.com', 'johndoe@thisisfake.org']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://example.com/'
Change year to 2016, try to guess author details from git config# -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
import subprocess
def get_author_detail(arg='name'):
p = subprocess.Popen(['git', 'config', 'user.{}'.format(arg)],
stdout=subprocess.PIPE)
try:
out, _ = p.communicate()
except:
out = ''
return out.strip() or None
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = [get_author_detail('name') or 'John Doe']
authors_string = ', '.join(authors)
emails = [get_author_detail('email') or 'doe@example.org']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://example.com/'
|
<commit_before># -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = ['John Doe']
authors_string = ', '.join(authors)
emails = ['foobar@example.com', 'johndoe@thisisfake.org']
license = 'MIT'
copyright = '2015 ' + authors_string
url = 'http://example.com/'
<commit_msg>Change year to 2016, try to guess author details from git config<commit_after># -*- coding: utf-8 -*-
"""Project metadata
Information describing the project.
"""
import subprocess
def get_author_detail(arg='name'):
p = subprocess.Popen(['git', 'config', 'user.{}'.format(arg)],
stdout=subprocess.PIPE)
try:
out, _ = p.communicate()
except:
out = ''
return out.strip() or None
# The package name, which is also the "UNIX name" for the project.
package = 'my_module'
project = "My Awesome Module"
project_no_spaces = project.replace(' ', '')
version = '0.1.0'
description = 'It does cool things'
authors = [get_author_detail('name') or 'John Doe']
authors_string = ', '.join(authors)
emails = [get_author_detail('email') or 'doe@example.org']
license = 'MIT'
copyright = '2016 ' + authors_string
url = 'http://example.com/'
|
f9012b88f60f8e4ac96cb55aea763edc74ad586e
|
shell/view/BuddyIcon.py
|
shell/view/BuddyIcon.py
|
from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
def __init__(self, shell, menu_shell, friend):
MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
color=friend.get_color(), size=96)
self._shell = shell
self._friend = friend
def set_popup_distance(self, distance):
self._popup_distance = distance
def create_menu(self):
menu = BuddyMenu(self._shell, self._friend)
menu.connect('action', self._popup_action_cb)
return menu
def _popup_action_cb(self, popup, action):
self.popdown()
model = self._shell.get_model()
if action == BuddyMenu.ACTION_REMOVE_FRIEND:
friends = model.get_friends()
friends.remove(buddy)
buddy = self._friend.get_buddy()
if buddy == None:
return
if action == BuddyMenu.ACTION_INVITE:
activity = model.get_current_activity()
activity.invite(buddy)
elif action == BuddyMenu.ACTION_MAKE_FRIEND:
friends = model.get_friends()
friends.make_friend(buddy)
|
from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
def __init__(self, shell, menu_shell, friend):
MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
color=friend.get_color(), size=96)
self._shell = shell
self._friend = friend
def set_popup_distance(self, distance):
self._popup_distance = distance
def create_menu(self):
menu = BuddyMenu(self._shell, self._friend)
menu.connect('action', self._popup_action_cb)
return menu
def _popup_action_cb(self, popup, action):
self.popdown()
buddy = self._friend.get_buddy()
if buddy == None:
return
model = self._shell.get_model()
if action == BuddyMenu.ACTION_INVITE:
activity = model.get_current_activity()
activity.invite(buddy)
elif action == BuddyMenu.ACTION_MAKE_FRIEND:
friends = model.get_friends()
friends.make_friend(buddy)
elif action == BuddyMenu.ACTION_REMOVE_FRIEND:
friends = model.get_friends()
friends.remove(buddy)
|
Move remove code down to fix undefined var error
|
Move remove code down to fix undefined var error
|
Python
|
lgpl-2.1
|
samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,godiard/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,ceibal-tatu/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,puneetgkaur/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,tchx84/debian-pkg-sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,manuq/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,tchx84/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,sugarlabs/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3
|
from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
def __init__(self, shell, menu_shell, friend):
MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
color=friend.get_color(), size=96)
self._shell = shell
self._friend = friend
def set_popup_distance(self, distance):
self._popup_distance = distance
def create_menu(self):
menu = BuddyMenu(self._shell, self._friend)
menu.connect('action', self._popup_action_cb)
return menu
def _popup_action_cb(self, popup, action):
self.popdown()
model = self._shell.get_model()
if action == BuddyMenu.ACTION_REMOVE_FRIEND:
friends = model.get_friends()
friends.remove(buddy)
buddy = self._friend.get_buddy()
if buddy == None:
return
if action == BuddyMenu.ACTION_INVITE:
activity = model.get_current_activity()
activity.invite(buddy)
elif action == BuddyMenu.ACTION_MAKE_FRIEND:
friends = model.get_friends()
friends.make_friend(buddy)
Move remove code down to fix undefined var error
|
from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
def __init__(self, shell, menu_shell, friend):
MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
color=friend.get_color(), size=96)
self._shell = shell
self._friend = friend
def set_popup_distance(self, distance):
self._popup_distance = distance
def create_menu(self):
menu = BuddyMenu(self._shell, self._friend)
menu.connect('action', self._popup_action_cb)
return menu
def _popup_action_cb(self, popup, action):
self.popdown()
buddy = self._friend.get_buddy()
if buddy == None:
return
model = self._shell.get_model()
if action == BuddyMenu.ACTION_INVITE:
activity = model.get_current_activity()
activity.invite(buddy)
elif action == BuddyMenu.ACTION_MAKE_FRIEND:
friends = model.get_friends()
friends.make_friend(buddy)
elif action == BuddyMenu.ACTION_REMOVE_FRIEND:
friends = model.get_friends()
friends.remove(buddy)
|
<commit_before>from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
def __init__(self, shell, menu_shell, friend):
MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
color=friend.get_color(), size=96)
self._shell = shell
self._friend = friend
def set_popup_distance(self, distance):
self._popup_distance = distance
def create_menu(self):
menu = BuddyMenu(self._shell, self._friend)
menu.connect('action', self._popup_action_cb)
return menu
def _popup_action_cb(self, popup, action):
self.popdown()
model = self._shell.get_model()
if action == BuddyMenu.ACTION_REMOVE_FRIEND:
friends = model.get_friends()
friends.remove(buddy)
buddy = self._friend.get_buddy()
if buddy == None:
return
if action == BuddyMenu.ACTION_INVITE:
activity = model.get_current_activity()
activity.invite(buddy)
elif action == BuddyMenu.ACTION_MAKE_FRIEND:
friends = model.get_friends()
friends.make_friend(buddy)
<commit_msg>Move remove code down to fix undefined var error<commit_after>
|
from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
    """Canvas icon for a buddy; pops up a BuddyMenu of friend actions."""
    def __init__(self, shell, menu_shell, friend):
        MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
                          color=friend.get_color(), size=96)
        self._shell = shell
        self._friend = friend
    def set_popup_distance(self, distance):
        # Popup distance consumed by the MenuIcon machinery -- presumably
        # pixels; TODO(review) confirm units/consumer.
        self._popup_distance = distance
    def create_menu(self):
        """Build the per-buddy popup menu and hook up its action events."""
        menu = BuddyMenu(self._shell, self._friend)
        menu.connect('action', self._popup_action_cb)
        return menu
    def _popup_action_cb(self, popup, action):
        """Dispatch a menu action (invite / make friend / remove friend)."""
        self.popdown()
        buddy = self._friend.get_buddy()
        if buddy == None:
            return  # no live buddy object available; nothing to act on
        model = self._shell.get_model()
        if action == BuddyMenu.ACTION_INVITE:
            activity = model.get_current_activity()
            activity.invite(buddy)
        elif action == BuddyMenu.ACTION_MAKE_FRIEND:
            friends = model.get_friends()
            friends.make_friend(buddy)
        elif action == BuddyMenu.ACTION_REMOVE_FRIEND:
            friends = model.get_friends()
            friends.remove(buddy)
|
from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
    """Canvas icon for a buddy; pops up a BuddyMenu of friend actions."""
    def __init__(self, shell, menu_shell, friend):
        MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
                          color=friend.get_color(), size=96)
        self._shell = shell
        self._friend = friend
    def set_popup_distance(self, distance):
        self._popup_distance = distance
    def create_menu(self):
        """Build the per-buddy popup menu and hook up its action events."""
        menu = BuddyMenu(self._shell, self._friend)
        menu.connect('action', self._popup_action_cb)
        return menu
    def _popup_action_cb(self, popup, action):
        """Dispatch a menu action (invite / make friend / remove friend)."""
        self.popdown()
        # Bug fix: resolve ``buddy`` before any branch uses it.  The original
        # REMOVE_FRIEND branch referenced ``buddy`` before it was assigned,
        # raising NameError.
        buddy = self._friend.get_buddy()
        if buddy == None:
            return  # no live buddy object available; nothing to act on
        model = self._shell.get_model()
        if action == BuddyMenu.ACTION_INVITE:
            activity = model.get_current_activity()
            activity.invite(buddy)
        elif action == BuddyMenu.ACTION_MAKE_FRIEND:
            friends = model.get_friends()
            friends.make_friend(buddy)
        elif action == BuddyMenu.ACTION_REMOVE_FRIEND:
            friends = model.get_friends()
            friends.remove(buddy)
Move remove code down to fix undefined var errorfrom sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
    """Canvas icon for a buddy; pops up a BuddyMenu of friend actions."""
    def __init__(self, shell, menu_shell, friend):
        MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
                          color=friend.get_color(), size=96)
        self._shell = shell
        self._friend = friend
    def set_popup_distance(self, distance):
        # Popup distance consumed by the MenuIcon machinery -- presumably
        # pixels; TODO(review) confirm units/consumer.
        self._popup_distance = distance
    def create_menu(self):
        """Build the per-buddy popup menu and hook up its action events."""
        menu = BuddyMenu(self._shell, self._friend)
        menu.connect('action', self._popup_action_cb)
        return menu
    def _popup_action_cb(self, popup, action):
        """Dispatch a menu action (invite / make friend / remove friend)."""
        self.popdown()
        buddy = self._friend.get_buddy()
        if buddy == None:
            return  # no live buddy object available; nothing to act on
        model = self._shell.get_model()
        if action == BuddyMenu.ACTION_INVITE:
            activity = model.get_current_activity()
            activity.invite(buddy)
        elif action == BuddyMenu.ACTION_MAKE_FRIEND:
            friends = model.get_friends()
            friends.make_friend(buddy)
        elif action == BuddyMenu.ACTION_REMOVE_FRIEND:
            friends = model.get_friends()
            friends.remove(buddy)
|
<commit_before>from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
    """Canvas icon for a buddy; pops up a BuddyMenu of friend actions."""
    def __init__(self, shell, menu_shell, friend):
        MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
                          color=friend.get_color(), size=96)
        self._shell = shell
        self._friend = friend
    def set_popup_distance(self, distance):
        self._popup_distance = distance
    def create_menu(self):
        """Build the per-buddy popup menu and hook up its action events."""
        menu = BuddyMenu(self._shell, self._friend)
        menu.connect('action', self._popup_action_cb)
        return menu
    def _popup_action_cb(self, popup, action):
        """Dispatch a menu action (invite / make friend / remove friend)."""
        self.popdown()
        # Bug fix: resolve ``buddy`` before any branch uses it.  The original
        # REMOVE_FRIEND branch referenced ``buddy`` before it was assigned,
        # raising NameError.
        buddy = self._friend.get_buddy()
        if buddy == None:
            return  # no live buddy object available; nothing to act on
        model = self._shell.get_model()
        if action == BuddyMenu.ACTION_INVITE:
            activity = model.get_current_activity()
            activity.invite(buddy)
        elif action == BuddyMenu.ACTION_MAKE_FRIEND:
            friends = model.get_friends()
            friends.make_friend(buddy)
        elif action == BuddyMenu.ACTION_REMOVE_FRIEND:
            friends = model.get_friends()
            friends.remove(buddy)
<commit_msg>Move remove code down to fix undefined var error<commit_after>from sugar.canvas.MenuIcon import MenuIcon
from view.BuddyMenu import BuddyMenu
class BuddyIcon(MenuIcon):
    """Canvas icon for a buddy; pops up a BuddyMenu of friend actions."""
    def __init__(self, shell, menu_shell, friend):
        MenuIcon.__init__(self, menu_shell, icon_name='stock-buddy',
                          color=friend.get_color(), size=96)
        self._shell = shell
        self._friend = friend
    def set_popup_distance(self, distance):
        # Popup distance consumed by the MenuIcon machinery -- presumably
        # pixels; TODO(review) confirm units/consumer.
        self._popup_distance = distance
    def create_menu(self):
        """Build the per-buddy popup menu and hook up its action events."""
        menu = BuddyMenu(self._shell, self._friend)
        menu.connect('action', self._popup_action_cb)
        return menu
    def _popup_action_cb(self, popup, action):
        """Dispatch a menu action (invite / make friend / remove friend)."""
        self.popdown()
        buddy = self._friend.get_buddy()
        if buddy == None:
            return  # no live buddy object available; nothing to act on
        model = self._shell.get_model()
        if action == BuddyMenu.ACTION_INVITE:
            activity = model.get_current_activity()
            activity.invite(buddy)
        elif action == BuddyMenu.ACTION_MAKE_FRIEND:
            friends = model.get_friends()
            friends.make_friend(buddy)
        elif action == BuddyMenu.ACTION_REMOVE_FRIEND:
            friends = model.get_friends()
            friends.remove(buddy)
|
de0bbf978695d206189ee4effb124234968525cb
|
django_afip/views.py
|
django_afip/views.py
|
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Render receipt ``pk`` as an HTML page."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Render receipt ``pk`` as a PDF and prompt the browser to download."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
|
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Renders a receipt as HTML."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Renders a receipt as a PDF, prompting to download it."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
class ReceiptPDFDisplayView(View):
    """
    Renders a receipt as a PDF.
    Browsers should render the file, rather than prompt to download it.
    """
    def get(self, request, pk):
        # No Content-Disposition header, so the browser displays inline.
        response = HttpResponse(content_type='application/pdf')
        generate_receipt_pdf(pk, response)
        return response
|
Add a view to display PDF receipts
|
Add a view to display PDF receipts
Fixes #23
Closes !7
Closes !8
|
Python
|
isc
|
hobarrera/django-afip,hobarrera/django-afip
|
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Render receipt ``pk`` as an HTML page."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Render receipt ``pk`` as a PDF and prompt the browser to download."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
Add a view to display PDF receipts
Fixes #23
Closes !7
Closes !8
|
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Renders a receipt as HTML."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Renders a receipt as a PDF, prompting to download it."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
class ReceiptPDFDisplayView(View):
    """
    Renders a receipt as a PDF.
    Browsers should render the file, rather than prompt to download it.
    """
    def get(self, request, pk):
        # No Content-Disposition header, so the browser displays inline.
        response = HttpResponse(content_type='application/pdf')
        generate_receipt_pdf(pk, response)
        return response
|
<commit_before>from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Render receipt ``pk`` as an HTML page."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Render receipt ``pk`` as a PDF and prompt the browser to download."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
<commit_msg>Add a view to display PDF receipts
Fixes #23
Closes !7
Closes !8<commit_after>
|
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Renders a receipt as HTML."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Renders a receipt as a PDF, prompting to download it."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
class ReceiptPDFDisplayView(View):
    """
    Renders a receipt as a PDF.
    Browsers should render the file, rather than prompt to download it.
    """
    def get(self, request, pk):
        # No Content-Disposition header, so the browser displays inline.
        response = HttpResponse(content_type='application/pdf')
        generate_receipt_pdf(pk, response)
        return response
|
from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Render receipt ``pk`` as an HTML page."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Render receipt ``pk`` as a PDF and prompt the browser to download."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
Add a view to display PDF receipts
Fixes #23
Closes !7
Closes !8from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Renders a receipt as HTML."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Renders a receipt as a PDF, prompting to download it."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
class ReceiptPDFDisplayView(View):
    """
    Renders a receipt as a PDF.
    Browsers should render the file, rather than prompt to download it.
    """
    def get(self, request, pk):
        # No Content-Disposition header, so the browser displays inline.
        response = HttpResponse(content_type='application/pdf')
        generate_receipt_pdf(pk, response)
        return response
|
<commit_before>from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Render receipt ``pk`` as an HTML page."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Render receipt ``pk`` as a PDF and prompt the browser to download."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
<commit_msg>Add a view to display PDF receipts
Fixes #23
Closes !7
Closes !8<commit_after>from django.http import HttpResponse
from django.utils.translation import ugettext as _
from django.views.generic import View
from .pdf import generate_receipt_pdf
class ReceiptHTMLView(View):
    """Renders a receipt as HTML."""
    def get(self, request, pk):
        # Third argument presumably selects HTML output in
        # generate_receipt_pdf -- confirm against its signature.
        return HttpResponse(
            generate_receipt_pdf(pk, request, True),
        )
class ReceiptPDFView(View):
    """Renders a receipt as a PDF, prompting to download it."""
    def get(self, request, pk):
        response = HttpResponse(content_type='application/pdf')
        # ``attachment`` disposition forces a download dialog.
        response['Content-Disposition'] = 'attachment; filename=' + \
            _('receipt %s.pdf' % pk)
        generate_receipt_pdf(pk, response)  # writes the PDF into the response
        return response
class ReceiptPDFDisplayView(View):
    """
    Renders a receipt as a PDF.
    Browsers should render the file, rather than prompt to download it.
    """
    def get(self, request, pk):
        # No Content-Disposition header, so the browser displays inline.
        response = HttpResponse(content_type='application/pdf')
        generate_receipt_pdf(pk, response)
        return response
|
03d8a4e20ee4b6fd49495b7b047ea78d0b9a5bb4
|
dmoj/graders/base.py
|
dmoj/graders/base.py
|
class BaseGrader(object):
    """Abstract base class for graders.

    Holds the submission metadata and eagerly compiles the submission via
    the subclass hook ``_generate_binary``; supports cooperative abort.
    """
    def __init__(self, judge, problem, language, source):
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
|
class BaseGrader(object):
    """Abstract base class for graders.

    Holds the submission metadata and eagerly compiles the submission via
    the subclass hook ``_generate_binary``; supports cooperative abort.
    """
    def __init__(self, judge, problem, language, source):
        # Python 2: normalize unicode source to UTF-8 encoded bytes.
        if isinstance(source, unicode):
            source = source.encode('utf-8')
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
|
Make source utf-8 encoded bytes.
|
Make source utf-8 encoded bytes.
|
Python
|
agpl-3.0
|
DMOJ/judge,DMOJ/judge,DMOJ/judge
|
class BaseGrader(object):
    """Abstract base class for graders.

    Holds the submission metadata and eagerly compiles the submission via
    the subclass hook ``_generate_binary``; supports cooperative abort.
    """
    def __init__(self, judge, problem, language, source):
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
Make source utf-8 encoded bytes.
|
class BaseGrader(object):
    """Abstract base class for graders.

    Holds the submission metadata and eagerly compiles the submission via
    the subclass hook ``_generate_binary``; supports cooperative abort.
    """
    def __init__(self, judge, problem, language, source):
        # Python 2: normalize unicode source to UTF-8 encoded bytes.
        if isinstance(source, unicode):
            source = source.encode('utf-8')
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
|
<commit_before>class BaseGrader(object):
    def __init__(self, judge, problem, language, source):
        """Store submission metadata and compile via ``_generate_binary``."""
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
        pass  # NOTE(review): dead statement; safe to delete
<commit_msg>Make source utf-8 encoded bytes.<commit_after>
|
class BaseGrader(object):
    """Abstract base class for graders.

    Holds the submission metadata and eagerly compiles the submission via
    the subclass hook ``_generate_binary``; supports cooperative abort.
    """
    def __init__(self, judge, problem, language, source):
        # Python 2: normalize unicode source to UTF-8 encoded bytes.
        if isinstance(source, unicode):
            source = source.encode('utf-8')
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
|
class BaseGrader(object):
    """Abstract base class for graders.

    Holds the submission metadata and eagerly compiles the submission via
    the subclass hook ``_generate_binary``; supports cooperative abort.
    """
    def __init__(self, judge, problem, language, source):
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
Make source utf-8 encoded bytes.class BaseGrader(object):
    def __init__(self, judge, problem, language, source):
        """Store submission metadata and compile via ``_generate_binary``."""
        # Python 2: normalize unicode source to UTF-8 encoded bytes.
        if isinstance(source, unicode):
            source = source.encode('utf-8')
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
        pass  # NOTE(review): dead statement; safe to delete
|
<commit_before>class BaseGrader(object):
    def __init__(self, judge, problem, language, source):
        """Store submission metadata and compile via ``_generate_binary``."""
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
        pass  # NOTE(review): dead statement; safe to delete
<commit_msg>Make source utf-8 encoded bytes.<commit_after>class BaseGrader(object):
    def __init__(self, judge, problem, language, source):
        """Store submission metadata and compile via ``_generate_binary``."""
        # Python 2: normalize unicode source to UTF-8 encoded bytes.
        if isinstance(source, unicode):
            source = source.encode('utf-8')
        self.source = source
        self.language = language
        self.problem = problem
        self.judge = judge
        self.binary = self._generate_binary()  # subclass-produced artifact
        self._terminate_grading = False
        self._current_proc = None
    def grade(self, case):
        """Grade a single test case (subclass hook)."""
        raise NotImplementedError
    def _generate_binary(self):
        """Compile/prepare the submission binary (subclass hook)."""
        raise NotImplementedError
    def terminate_grading(self):
        """Flag grading for termination and kill any running child process."""
        self._terminate_grading = True
        if self._current_proc:
            try:
                self._current_proc.kill()
            except OSError:
                pass  # process already exited; nothing to kill
        pass  # NOTE(review): dead statement; safe to delete
|
a773d29d7bce78abea28209e53909ab52eee36a9
|
routes.py
|
routes.py
|
from flask import Flask, render_template
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; show next page or result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            co.update_completion()
            return render_template('pages/result.html')
    except Exception as detail:
        print detail
if __name__ == '__main__':
    app.run()
|
from flask import Flask, render_template, redirect
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; advance or go to result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            return redirect('/result')
    except Exception as detail:
        print detail
@app.route('/result')
def result():
    """Finalize the game and show the result page."""
    co.update_completion()
    return render_template('pages/result.html')
if __name__ == '__main__':
    app.run()
|
Use flask's redirect() method to go to result link
|
Use flask's redirect() method to go to result link
|
Python
|
mit
|
AlexMathew/tcg-ui
|
from flask import Flask, render_template
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; show next page or result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            co.update_completion()
            return render_template('pages/result.html')
    except Exception as detail:
        print detail
if __name__ == '__main__':
app.run()Use flask's redirect() method to go to result link
|
from flask import Flask, render_template, redirect
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; advance or go to result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            return redirect('/result')
    except Exception as detail:
        print detail
@app.route('/result')
def result():
    """Finalize the game and show the result page."""
    co.update_completion()
    return render_template('pages/result.html')
if __name__ == '__main__':
    app.run()
|
<commit_before>from flask import Flask, render_template
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; show next page or result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            co.update_completion()
            return render_template('pages/result.html')
    except Exception as detail:
        print detail
if __name__ == '__main__':
app.run()<commit_msg>Use flask's redirect() method to go to result link<commit_after>
|
from flask import Flask, render_template, redirect
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; advance or go to result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            return redirect('/result')
    except Exception as detail:
        print detail
@app.route('/result')
def result():
    """Finalize the game and show the result page."""
    co.update_completion()
    return render_template('pages/result.html')
if __name__ == '__main__':
    app.run()
|
from flask import Flask, render_template
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; show next page or result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            co.update_completion()
            return render_template('pages/result.html')
    except Exception as detail:
        print detail
if __name__ == '__main__':
app.run()Use flask's redirect() method to go to result linkfrom flask import Flask, render_template, redirect
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; advance or go to result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            return redirect('/result')
    except Exception as detail:
        print detail
@app.route('/result')
def result():
    """Finalize the game and show the result page."""
    co.update_completion()
    return render_template('pages/result.html')
if __name__ == '__main__':
    app.run()
|
<commit_before>from flask import Flask, render_template
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; show next page or result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            co.update_completion()
            return render_template('pages/result.html')
    except Exception as detail:
        print detail
if __name__ == '__main__':
app.run()<commit_msg>Use flask's redirect() method to go to result link<commit_after>from flask import Flask, render_template, redirect
from setup_cardsets import CardOperations
co = CardOperations()  # module-level game state shared by all routes
app = Flask(__name__)
@app.route('/')
def home():
    """Landing page."""
    return render_template('home.html')
@app.route('/rules')
def rules():
    """Static rules page."""
    return render_template('rules.html')
@app.route('/setup')
def setup():
    """Game setup page (choose card count)."""
    return render_template('setup.html')
@app.route('/toss/<int:cardcount>')
def toss(cardcount):
    """Build card sets for ``cardcount`` cards and show the toss page."""
    co.cardset(cardcount)
    return render_template('toss.html')
@app.route('/begin')
def begin():
    """Run the toss and show the opening page of the game."""
    try:
        co.toss_result()
        return render_template('pages/begin.html')
    except Exception as detail:
        # NOTE(review): errors are printed (Python 2 syntax) and swallowed,
        # so the view returns None -- confirm this is intended.
        print detail
@app.route('/game')
def game():
    """Reset to the first page of the game."""
    co.update_page()
    co.page_no = 1
    return render_template('pages/game.html')
@app.route('/game/<statval>')
def game_move(statval):
    """Play one move comparing stat ``statval``; advance or go to result."""
    try:
        completed = co.compare(int(statval))
        if not completed:
            co.update_page()
            return render_template('pages/game.html')
        else:
            return redirect('/result')
    except Exception as detail:
        print detail
@app.route('/result')
def result():
    """Finalize the game and show the result page."""
    co.update_completion()
    return render_template('pages/result.html')
if __name__ == '__main__':
    app.run()
|
e14b3fad26dce8dad3ca97c06e624f1d6b0764f9
|
mqueue/__init__.py
|
mqueue/__init__.py
|
__version__ = '0.5.5'  # package version
default_app_config = 'mqueue.apps.MqueueConfig'  # Django app-config hook
|
__version__ = '0.5.5'  # package version
default_app_config = 'mqueue.apps.MqueueConfig'  # Django app-config hook
# NOTE(review): Python 2 hack -- reload(sys) restores setdefaultencoding,
# then UTF-8 is forced as the process-wide default encoding.  This has
# global side effects at import time; confirm it is still needed.
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
|
Set default encoding to fix unicode errors
|
Set default encoding to fix unicode errors
|
Python
|
mit
|
synw/django-mqueue,synw/django-mqueue,synw/django-mqueue
|
__version__ = '0.5.5'  # package version
default_app_config = 'mqueue.apps.MqueueConfig'  # Django app-config hook
Set default encoding to fix unicode errors
|
__version__ = '0.5.5'  # package version
default_app_config = 'mqueue.apps.MqueueConfig'  # Django app-config hook
# NOTE(review): Python 2 hack -- reload(sys) restores setdefaultencoding,
# then UTF-8 is forced as the process-wide default encoding.  This has
# global side effects at import time; confirm it is still needed.
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
|
<commit_before>__version__ = '0.5.5'
default_app_config = 'mqueue.apps.MqueueConfig'
<commit_msg>Set default encoding to fix unicode errors<commit_after>
|
__version__ = '0.5.5'  # package version
default_app_config = 'mqueue.apps.MqueueConfig'  # Django app-config hook
# NOTE(review): Python 2 hack -- reload(sys) restores setdefaultencoding,
# then UTF-8 is forced as the process-wide default encoding.  This has
# global side effects at import time; confirm it is still needed.
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
|
__version__ = '0.5.5'  # package version
default_app_config = 'mqueue.apps.MqueueConfig'  # Django app-config hook
Set default encoding to fix unicode errors__version__ = '0.5.5'
default_app_config = 'mqueue.apps.MqueueConfig'
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
|
<commit_before>__version__ = '0.5.5'
default_app_config = 'mqueue.apps.MqueueConfig'
<commit_msg>Set default encoding to fix unicode errors<commit_after>__version__ = '0.5.5'
default_app_config = 'mqueue.apps.MqueueConfig'  # Django app-config hook
# NOTE(review): Python 2 hack to force UTF-8 default encoding at import time.
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
|
50836f606c5bdb9aa4472d109f0dc40e2f0f8dc6
|
examples/apc2016/download_dataset.py
|
examples/apc2016/download_dataset.py
|
#!/usr/bin/env python
import os.path as osp
import chainer
import fcn.data
import fcn.util
def main():
    """Download and unpack the APC2016 datasets into the chainer dataset dir."""
    dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
    # APC2016rbo: cached Google Drive download, then extract in place.
    path = osp.join(dataset_dir, 'APC2016rbo.tgz')
    fcn.data.cached_download(
        url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
        path=path,
    )
    fcn.util.extract_file(path, to_directory=dataset_dir)
    # APC2016JSKseg annotations archive.
    path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
    fcn.data.cached_download(
        url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
        path=path,
    )
    fcn.util.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
import os.path as osp
import chainer
import fcn
def main():
    """Download and unpack the APC2016 datasets into the chainer dataset dir."""
    dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
    # APC2016rbo: cached Google Drive download, then extract in place.
    path = osp.join(dataset_dir, 'APC2016rbo.tgz')
    fcn.data.cached_download(
        url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
        path=path,
    )
    # Bug fix: the helper module was renamed util -> utils; this first call
    # was left pointing at the old name (the second call below already used
    # fcn.utils).
    fcn.utils.extract_file(path, to_directory=dataset_dir)
    # APC2016JSKseg annotations archive.
    path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
    fcn.data.cached_download(
        url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
        path=path,
    )
    fcn.utils.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
    main()
|
Fix for renamed module util -> utils
|
Fix for renamed module util -> utils
|
Python
|
mit
|
wkentaro/fcn
|
#!/usr/bin/env python
import os.path as osp
import chainer
import fcn.data
import fcn.util
def main():
dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
path = osp.join(dataset_dir, 'APC2016rbo.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
main()
Fix for renamed module util -> utils
|
#!/usr/bin/env python
import os.path as osp
import chainer
import fcn
def main():
dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
path = osp.join(dataset_dir, 'APC2016rbo.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
path=path,
)
fcn.utils.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import os.path as osp
import chainer
import fcn.data
import fcn.util
def main():
dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
path = osp.join(dataset_dir, 'APC2016rbo.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
main()
<commit_msg>Fix for renamed module util -> utils<commit_after>
|
#!/usr/bin/env python
import os.path as osp
import chainer
import fcn
def main():
dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
path = osp.join(dataset_dir, 'APC2016rbo.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
path=path,
)
fcn.utils.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import os.path as osp
import chainer
import fcn.data
import fcn.util
def main():
dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
path = osp.join(dataset_dir, 'APC2016rbo.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
main()
Fix for renamed module util -> utils#!/usr/bin/env python
import os.path as osp
import chainer
import fcn
def main():
dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
path = osp.join(dataset_dir, 'APC2016rbo.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
path=path,
)
fcn.utils.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
import os.path as osp
import chainer
import fcn.data
import fcn.util
def main():
dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
path = osp.join(dataset_dir, 'APC2016rbo.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
main()
<commit_msg>Fix for renamed module util -> utils<commit_after>#!/usr/bin/env python
import os.path as osp
import chainer
import fcn
def main():
dataset_dir = chainer.dataset.get_dataset_directory('apc2016')
path = osp.join(dataset_dir, 'APC2016rbo.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vSV9oLTd1U2I3TDg',
path=path,
)
fcn.util.extract_file(path, to_directory=dataset_dir)
path = osp.join(dataset_dir, 'APC2016JSKseg/annotated.tgz')
fcn.data.cached_download(
url='https://drive.google.com/uc?id=0B9P1L--7Wd2vaExFU1AxWHlMdTg',
path=path,
)
fcn.utils.extract_file(path, to_directory=dataset_dir)
if __name__ == '__main__':
main()
|
3caab02c5e0ca0ebc57f57c77ed550b7e3fc55d2
|
analyze.py
|
analyze.py
|
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def plot(time_objects, dists):
pass
|
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from glob import glob
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def load_data_files(data_folder_path='data'):
data_folder = os.path.join(os.getcwd(), data_folder_path)
files = []
for file in glob(data_folder + "/*/*"):
if '.dat' in file:
files.append(file)
return files
def plot(time_objects, dists):
pass
|
Add helper functions for loading data
|
Add helper functions for loading data
|
Python
|
mit
|
JustinShenk/sensei
|
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def plot(time_objects, dists):
pass
Add helper functions for loading data
|
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from glob import glob
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def load_data_files(data_folder_path='data'):
data_folder = os.path.join(os.getcwd(), data_folder_path)
files = []
for file in glob(data_folder + "/*/*"):
if '.dat' in file:
files.append(file)
return files
def plot(time_objects, dists):
pass
|
<commit_before>import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def plot(time_objects, dists):
pass
<commit_msg>Add helper functions for loading data<commit_after>
|
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from glob import glob
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def load_data_files(data_folder_path='data'):
data_folder = os.path.join(os.getcwd(), data_folder_path)
files = []
for file in glob(data_folder + "/*/*"):
if '.dat' in file:
files.append(file)
return files
def plot(time_objects, dists):
pass
|
import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def plot(time_objects, dists):
pass
Add helper functions for loading dataimport os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from glob import glob
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def load_data_files(data_folder_path='data'):
data_folder = os.path.join(os.getcwd(), data_folder_path)
files = []
for file in glob(data_folder + "/*/*"):
if '.dat' in file:
files.append(file)
return files
def plot(time_objects, dists):
pass
|
<commit_before>import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def plot(time_objects, dists):
pass
<commit_msg>Add helper functions for loading data<commit_after>import os
import pickle
import numpy as np
import matplotlib.pyplot as plt
from glob import glob
from datetime import datetime
def load_data(data_path):
'''Return dictionary `data` from string `data_path`
'''
os.path.join(data_path, '1.dat')
data = pickle.load(open(data_path, 'rb'))
return data
def get_baseline(data):
'''Get most recent baseline/calibration from subject.
'''
baselines = []
for k, v in data.items():
if 'baseline' in v:
print(k, v)
baselines.append((k, v))
# Get most recent baseline
return sorted(baselines)[-1][1].split(' ')[-1]
def get_distances(data):
'''Get tuple of posture measurements with time stamps.
Returns:
Tuple - (time_object, distances)
'''
distances = []
for k, v in data.items():
if type(v).__module__ == 'numpy':
# Convert strings to datetime object
time_object = datetime.strptime(k, '%Y-%m-%d_%H-%M-%S')
distances.append((time_object, v[0][2]))
# Sort readings by time to restore order
time_objects, dists = zip(*sorted(zip(time_objects, widths)))
return time_object, dists
def load_data_files(data_folder_path='data'):
data_folder = os.path.join(os.getcwd(), data_folder_path)
files = []
for file in glob(data_folder + "/*/*"):
if '.dat' in file:
files.append(file)
return files
def plot(time_objects, dists):
pass
|
46a5a9b073a5d170133400a3f8b84d2fa8ee24bb
|
salt/states/pagerduty_user.py
|
salt/states/pagerduty_user.py
|
# -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those suppored by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
|
# -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those supported by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
|
Fix typo an suppored -> supported
|
Fix typo an suppored -> supported
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
# -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those suppored by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
Fix typo an suppored -> supported
|
# -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those supported by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those suppored by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
<commit_msg>Fix typo an suppored -> supported<commit_after>
|
# -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those supported by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
|
# -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those suppored by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
Fix typo an suppored -> supported# -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those supported by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those suppored by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
<commit_msg>Fix typo an suppored -> supported<commit_after># -*- coding: utf-8 -*-
'''
Manage PagerDuty users.
Example:
.. code-block:: yaml
ensure bruce test user 1:
pagerduty.user_present:
- name: 'Bruce TestUser1'
- email: bruce+test1@lyft.com
- requester_id: P1GV5NT
'''
def __virtual__():
'''
Only load if the pygerduty module is available in __salt__
'''
return 'pagerduty_user' if 'pagerduty_util.get_resource' in __salt__ else False
def present(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user exists.
Arguments match those supported by
https://developer.pagerduty.com/documentation/rest/users/create.
'''
return __salt__['pagerduty_util.resource_present']('users',
['email', 'name', 'id'],
None,
profile,
subdomain,
api_key,
**kwargs)
def absent(profile='pagerduty', subdomain=None, api_key=None, **kwargs):
'''
Ensure pagerduty user does not exist.
Name can be pagerduty id, email address, or user name.
'''
return __salt__['pagerduty_util.resource_absent']('users',
['email', 'name', 'id'],
profile,
subdomain,
api_key,
**kwargs)
|
e340d8f3c36a026fb3b3f13d8f47dc9dc1b325ef
|
gears/views.py
|
gears/views.py
|
import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path, absolute_path):
asset_attributes = AssetAttributes(environment, path)
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, document_root=None, insecure=False, **kwargs):
if not settings.DEBUG and not insecure:
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
if normalized_path in environment.public_assets:
absolute_path = environment.find(normalized_path)
else:
absolute_path = None
if not absolute_path:
return staticfiles_serve(request, path, document_root=document_root,
insecure=insecure, **kwargs)
mimetype, encoding = mimetypes.guess_type(absolute_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(
build_asset(environment, normalized_path, absolute_path),
mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
|
import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path):
if path not in environment.public_assets:
return
asset_attributes = AssetAttributes(environment, path)
absolute_path = environment.find(path)
if absolute_path:
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, **kwargs):
if not settings.DEBUG and not kwargs.get('insecure'):
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
asset = build_asset(environment, normalized_path)
if not asset:
return staticfiles_serve(request, path, **kwargs)
mimetype, encoding = mimetypes.guess_type(normalized_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(asset, mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
|
Move some code from serve view to build_asset function
|
Move some code from serve view to build_asset function
|
Python
|
isc
|
gears/django-gears,juliomenendez/django-gears,gears/django-gears,juliomenendez/django-gears,juliomenendez/django-gears,wiserthanever/django-gears,juliomenendez/django-gears,gears/django-gears,wiserthanever/django-gears,wiserthanever/django-gears,wiserthanever/django-gears
|
import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path, absolute_path):
asset_attributes = AssetAttributes(environment, path)
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, document_root=None, insecure=False, **kwargs):
if not settings.DEBUG and not insecure:
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
if normalized_path in environment.public_assets:
absolute_path = environment.find(normalized_path)
else:
absolute_path = None
if not absolute_path:
return staticfiles_serve(request, path, document_root=document_root,
insecure=insecure, **kwargs)
mimetype, encoding = mimetypes.guess_type(absolute_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(
build_asset(environment, normalized_path, absolute_path),
mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
Move some code from serve view to build_asset function
|
import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path):
if path not in environment.public_assets:
return
asset_attributes = AssetAttributes(environment, path)
absolute_path = environment.find(path)
if absolute_path:
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, **kwargs):
if not settings.DEBUG and not kwargs.get('insecure'):
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
asset = build_asset(environment, normalized_path)
if not asset:
return staticfiles_serve(request, path, **kwargs)
mimetype, encoding = mimetypes.guess_type(normalized_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(asset, mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
|
<commit_before>import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path, absolute_path):
asset_attributes = AssetAttributes(environment, path)
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, document_root=None, insecure=False, **kwargs):
if not settings.DEBUG and not insecure:
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
if normalized_path in environment.public_assets:
absolute_path = environment.find(normalized_path)
else:
absolute_path = None
if not absolute_path:
return staticfiles_serve(request, path, document_root=document_root,
insecure=insecure, **kwargs)
mimetype, encoding = mimetypes.guess_type(absolute_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(
build_asset(environment, normalized_path, absolute_path),
mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
<commit_msg>Move some code from serve view to build_asset function<commit_after>
|
import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path):
if path not in environment.public_assets:
return
asset_attributes = AssetAttributes(environment, path)
absolute_path = environment.find(path)
if absolute_path:
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, **kwargs):
if not settings.DEBUG and not kwargs.get('insecure'):
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
asset = build_asset(environment, normalized_path)
if not asset:
return staticfiles_serve(request, path, **kwargs)
mimetype, encoding = mimetypes.guess_type(normalized_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(asset, mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
|
import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path, absolute_path):
asset_attributes = AssetAttributes(environment, path)
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, document_root=None, insecure=False, **kwargs):
if not settings.DEBUG and not insecure:
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
if normalized_path in environment.public_assets:
absolute_path = environment.find(normalized_path)
else:
absolute_path = None
if not absolute_path:
return staticfiles_serve(request, path, document_root=document_root,
insecure=insecure, **kwargs)
mimetype, encoding = mimetypes.guess_type(absolute_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(
build_asset(environment, normalized_path, absolute_path),
mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
Move some code from serve view to build_asset functionimport mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path):
if path not in environment.public_assets:
return
asset_attributes = AssetAttributes(environment, path)
absolute_path = environment.find(path)
if absolute_path:
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, **kwargs):
if not settings.DEBUG and not kwargs.get('insecure'):
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
asset = build_asset(environment, normalized_path)
if not asset:
return staticfiles_serve(request, path, **kwargs)
mimetype, encoding = mimetypes.guess_type(normalized_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(asset, mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
|
<commit_before>import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path, absolute_path):
asset_attributes = AssetAttributes(environment, path)
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, document_root=None, insecure=False, **kwargs):
if not settings.DEBUG and not insecure:
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
if normalized_path in environment.public_assets:
absolute_path = environment.find(normalized_path)
else:
absolute_path = None
if not absolute_path:
return staticfiles_serve(request, path, document_root=document_root,
insecure=insecure, **kwargs)
mimetype, encoding = mimetypes.guess_type(absolute_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(
build_asset(environment, normalized_path, absolute_path),
mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
<commit_msg>Move some code from serve view to build_asset function<commit_after>import mimetypes
import posixpath
import urllib
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.contrib.staticfiles.views import serve as staticfiles_serve
from django.http import HttpResponse
from .asset_attributes import AssetAttributes
from .assets import Asset, StaticAsset
from .settings import environment
def build_asset(environment, path):
if path not in environment.public_assets:
return
asset_attributes = AssetAttributes(environment, path)
absolute_path = environment.find(path)
if absolute_path:
if asset_attributes.get_processors():
return Asset(asset_attributes, absolute_path)
return StaticAsset(asset_attributes, absolute_path)
def serve(request, path, **kwargs):
if not settings.DEBUG and not kwargs.get('insecure'):
raise ImproperlyConfigured(
"The gears view can only be used in debug mode or if the "
"--insecure option of 'runserver' is used.")
normalized_path = posixpath.normpath(urllib.unquote(path)).lstrip('/')
asset = build_asset(environment, normalized_path)
if not asset:
return staticfiles_serve(request, path, **kwargs)
mimetype, encoding = mimetypes.guess_type(normalized_path)
mimetype = mimetype or 'application/octet-stream'
response = HttpResponse(asset, mimetype=mimetype)
if encoding:
response['Content-Encoding'] = encoding
return response
|
82457741a352602f6ef946e387070c77eb50781c
|
examples/macallan.py
|
examples/macallan.py
|
# -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
server.serve_forever()
|
# -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
print('Running locally on http://localhost:5000')
server.serve_forever()
|
Print a serving message in the example app
|
Print a serving message in the example app
|
Python
|
mit
|
nickfrostatx/malt
|
# -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
server.serve_forever()
Print a serving message in the example app
|
# -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
print('Running locally on http://localhost:5000')
server.serve_forever()
|
<commit_before># -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
server.serve_forever()
<commit_msg>Print a serving message in the example app<commit_after>
|
# -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
print('Running locally on http://localhost:5000')
server.serve_forever()
|
# -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
server.serve_forever()
Print a serving message in the example app# -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
print('Running locally on http://localhost:5000')
server.serve_forever()
|
<commit_before># -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
server.serve_forever()
<commit_msg>Print a serving message in the example app<commit_after># -*- coding: utf-8 -*-
from malt import Malt, Response, json
from wsgiref.simple_server import make_server
app = Malt()
@app.get('/')
def hello(request):
return Response(request.url + '\n')
@app.post('/users')
def hello(request):
return Response('Creating new user\n')
@app.get('/tasks')
def hello(request):
return json({'tasks': [
'Buy groceries',
'Clean the patio',
'Take over the world',
]})
@app.post('/tasks')
def hello(request):
return Response('Adding a task!\n')
server = make_server('localhost', 5000, app)
print('Running locally on http://localhost:5000')
server.serve_forever()
|
a091db3e5d51da339ab3853f9188495a23410598
|
examples/settings.py
|
examples/settings.py
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
Reduce default resoution for example project
|
Reduce default resoution for example project
|
Python
|
isc
|
Contraz/demosys-py
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
Reduce default resoution for example project
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
<commit_before>import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
<commit_msg>Reduce default resoution for example project<commit_after>
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
Reduce default resoution for example projectimport os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
<commit_before>import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1920, 1080),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
<commit_msg>Reduce default resoution for example project<commit_after>import os
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
SCREENSHOT_PATH = None
OPENGL = {
"version": (3, 3),
}
WINDOW = {
"class": "demosys.context.pyqt.Window",
"size": (1280, 720),
"aspect_ratio": 16 / 9,
"fullscreen": False,
"resizable": False,
"title": "Examples",
"vsync": True,
"cursor": True,
"samples": 4,
}
HEADLESS_DURATION = 100.0
ROCKET = {
"mode": "editor",
"rps": 24,
"project": None,
"files": None,
}
|
a3d65892ef572b115de919f62929e093dfb27400
|
examples/json_editor.py
|
examples/json_editor.py
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import sys
from pyqode.qt import QtWidgets
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
# pygment_style = random.choice(modes.PYGMENTS_STYLES)
# logging.info('pygments style: %s', pygment_style)
# self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
# pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
Make example use random color scheme
|
Make example use random color scheme
|
Python
|
mit
|
pyQode/pyqode.json,pyQode/pyqode.json
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import sys
from pyqode.qt import QtWidgets
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
Make example use random color scheme
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
# pygment_style = random.choice(modes.PYGMENTS_STYLES)
# logging.info('pygments style: %s', pygment_style)
# self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
# pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
<commit_before>"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import sys
from pyqode.qt import QtWidgets
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
<commit_msg>Make example use random color scheme<commit_after>
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
# pygment_style = random.choice(modes.PYGMENTS_STYLES)
# logging.info('pygments style: %s', pygment_style)
# self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
# pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import sys
from pyqode.qt import QtWidgets
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
Make example use random color scheme"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
# pygment_style = random.choice(modes.PYGMENTS_STYLES)
# logging.info('pygments style: %s', pygment_style)
# self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
# pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
<commit_before>"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import sys
from pyqode.qt import QtWidgets
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
<commit_msg>Make example use random color scheme<commit_after>"""
This is a very basic usage example of the JSONCodeEdit.
The interface is minimalist, it will open a test file. You can open other
documents by pressing Ctrl+O
"""
import logging
import os
import random
import sys
from pyqode.qt import QtWidgets
from pyqode.core import api, modes
from pyqode.json.widgets import JSONCodeEdit
class Window(QtWidgets.QMainWindow):
def __init__(self):
super(Window, self).__init__()
self.setMinimumWidth(800)
self.setMinimumHeight(600)
self.editor = JSONCodeEdit(self)
self.setCentralWidget(self.editor)
self.editor.file.open(
os.path.abspath(os.path.join(
'..', 'test', 'files', 'example.json')))
# pygment_style = random.choice(modes.PYGMENTS_STYLES)
# logging.info('pygments style: %s', pygment_style)
# self.editor.syntax_highlighter.color_scheme = api.ColorScheme(
# pygment_style)
logging.basicConfig(level=logging.INFO)
app = QtWidgets.QApplication(sys.argv)
window = Window()
window.show()
app.exec_()
|
7d10c18c1feb0c61aee9d3a44c3a7fa24e4e3c25
|
code_snippets/guides-agentchecks-methods.py
|
code_snippets/guides-agentchecks-methods.py
|
self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
self.count( ... ) # Sample a raw count metric
self.monotonic_count( ... ) # Sample an increasing counter metric
|
self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
|
Revert "Document AgentCheck count and monotonic_count methods"
|
Revert "Document AgentCheck count and monotonic_count methods"
This reverts commit e731c3a4a8590f5cddd23fd2f9af265749f08a38.
|
Python
|
bsd-3-clause
|
inokappa/documentation,macobo/documentation,inokappa/documentation,jhotta/documentation,jhotta/documentation,jhotta/documentation,macobo/documentation,macobo/documentation,inokappa/documentation,jhotta/documentation,jhotta/documentation,jhotta/documentation,inokappa/documentation,inokappa/documentation,macobo/documentation,macobo/documentation
|
self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
self.count( ... ) # Sample a raw count metric
self.monotonic_count( ... ) # Sample an increasing counter metric
Revert "Document AgentCheck count and monotonic_count methods"
This reverts commit e731c3a4a8590f5cddd23fd2f9af265749f08a38.
|
self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
|
<commit_before>self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
self.count( ... ) # Sample a raw count metric
self.monotonic_count( ... ) # Sample an increasing counter metric
<commit_msg>Revert "Document AgentCheck count and monotonic_count methods"
This reverts commit e731c3a4a8590f5cddd23fd2f9af265749f08a38.<commit_after>
|
self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
|
self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
self.count( ... ) # Sample a raw count metric
self.monotonic_count( ... ) # Sample an increasing counter metric
Revert "Document AgentCheck count and monotonic_count methods"
This reverts commit e731c3a4a8590f5cddd23fd2f9af265749f08a38.self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
|
<commit_before>self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
self.count( ... ) # Sample a raw count metric
self.monotonic_count( ... ) # Sample an increasing counter metric
<commit_msg>Revert "Document AgentCheck count and monotonic_count methods"
This reverts commit e731c3a4a8590f5cddd23fd2f9af265749f08a38.<commit_after>self.gauge( ... ) # Sample a gauge metric
self.increment( ... ) # Increment a counter metric
self.decrement( ... ) # Decrement a counter metric
self.histogram( ... ) # Sample a histogram metric
self.rate( ... ) # Sample a point, with the rate calculated at the end of the check
|
57a61538aecb9e102ee9e2e1365e80e3dbc8ed4f
|
adhocracy4/modules/apps.py
|
adhocracy4/modules/apps.py
|
from django.apps import AppConfig
class ModuleConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
|
from django.apps import AppConfig
class ModulesConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
|
Fix plural in modules app config
|
Fix plural in modules app config
|
Python
|
agpl-3.0
|
liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4
|
from django.apps import AppConfig
class ModuleConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
Fix plural in modules app config
|
from django.apps import AppConfig
class ModulesConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
|
<commit_before>from django.apps import AppConfig
class ModuleConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
<commit_msg>Fix plural in modules app config<commit_after>
|
from django.apps import AppConfig
class ModulesConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
|
from django.apps import AppConfig
class ModuleConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
Fix plural in modules app configfrom django.apps import AppConfig
class ModulesConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
|
<commit_before>from django.apps import AppConfig
class ModuleConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
<commit_msg>Fix plural in modules app config<commit_after>from django.apps import AppConfig
class ModulesConfig(AppConfig):
name = 'adhocracy4.modules'
label = 'a4modules'
|
1706fd54e50c6e4a67c84ceaa17708ca9346efe8
|
qipipe/__init__.py
|
qipipe/__init__.py
|
"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when there is an incompatible
public API change. The minor version is incremented when there
is a backward-compatible functionality change. The patch version
is incremented when there is a backward-compatible refactoring
or bug fix. All major, minor and patch version numbers begin at
1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
|
"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when a significant feature
set is introduced. The minor version is incremented when there
is a functionality change. The patch version is incremented when
there is a refactoring or bug fix. All major, minor and patch
version numbers begin at 1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
|
Modify the version numbering guideline.
|
Modify the version numbering guideline.
|
Python
|
bsd-2-clause
|
ohsu-qin/qipipe
|
"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when there is an incompatible
public API change. The minor version is incremented when there
is a backward-compatible functionality change. The patch version
is incremented when there is a backward-compatible refactoring
or bug fix. All major, minor and patch version numbers begin at
1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
Modify the version numbering guideline.
|
"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when a significant feature
set is introduced. The minor version is incremented when there
is a functionality change. The patch version is incremented when
there is a refactoring or bug fix. All major, minor and patch
version numbers begin at 1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
|
<commit_before>"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when there is an incompatible
public API change. The minor version is incremented when there
is a backward-compatible functionality change. The patch version
is incremented when there is a backward-compatible refactoring
or bug fix. All major, minor and patch version numbers begin at
1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
<commit_msg>Modify the version numbering guideline.<commit_after>
|
"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when a significant feature
set is introduced. The minor version is incremented when there
is a functionality change. The patch version is incremented when
there is a refactoring or bug fix. All major, minor and patch
version numbers begin at 1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
|
"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when there is an incompatible
public API change. The minor version is incremented when there
is a backward-compatible functionality change. The patch version
is incremented when there is a backward-compatible refactoring
or bug fix. All major, minor and patch version numbers begin at
1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
Modify the version numbering guideline."""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when a significant feature
set is introduced. The minor version is incremented when there
is a functionality change. The patch version is incremented when
there is a refactoring or bug fix. All major, minor and patch
version numbers begin at 1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
|
<commit_before>"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when there is an incompatible
public API change. The minor version is incremented when there
is a backward-compatible functionality change. The patch version
is incremented when there is a backward-compatible refactoring
or bug fix. All major, minor and patch version numbers begin at
1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
<commit_msg>Modify the version numbering guideline.<commit_after>"""The top-level Quantitative Imaging Pipeline module."""
__version__ = '4.5.3'
"""
The one-based major.minor.patch version.
The version numbering scheme loosely follows http://semver.org/.
The major version is incremented when a significant feature
set is introduced. The minor version is incremented when there
is a functionality change. The patch version is incremented when
there is a refactoring or bug fix. All major, minor and patch
version numbers begin at 1.
"""
def project(name=None):
"""
Gets or sets the current XNAT project name.
The default project name is ``QIN``.
:param name: the XNAT project name to set, or None to get the
current project name
:return: the current XNAT project name
"""
if name:
project.name = name
elif not hasattr(project, 'name'):
project.name = None
return project.name or 'QIN'
|
6dd43f5fcd6af5582423af5a34a5fcf273026f1b
|
sirius/TI_V00/record_names.py
|
sirius/TI_V00/record_names.py
|
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ON':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ON':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-BO-KICKEX-INC':{}, # increment to SI injection kicker delay
'TI-SI-KICKIN-ON':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
#'TI-SI-KICKIN-INC':{}, # increment to SI injection kicker delay
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
|
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ENABLED':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ENABLED':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-SI-KICKIN-ENABLED':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
'TI-DELAY':{},
'TI-DELAY-INC':{},
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
-
|
Change names of timing pvs
|
Change names of timing pvs
|
Python
|
mit
|
lnls-fac/sirius
|
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ON':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ON':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-BO-KICKEX-INC':{}, # increment to SI injection kicker delay
'TI-SI-KICKIN-ON':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
#'TI-SI-KICKIN-INC':{}, # increment to SI injection kicker delay
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
Change names of timing pvs
|
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ENABLED':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ENABLED':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-SI-KICKIN-ENABLED':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
'TI-DELAY':{},
'TI-DELAY-INC':{},
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
-
|
<commit_before>
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ON':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ON':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-BO-KICKEX-INC':{}, # increment to SI injection kicker delay
'TI-SI-KICKIN-ON':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
#'TI-SI-KICKIN-INC':{}, # increment to SI injection kicker delay
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
<commit_msg>Change names of timing pvs<commit_after>
|
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ENABLED':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ENABLED':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-SI-KICKIN-ENABLED':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
'TI-DELAY':{},
'TI-DELAY-INC':{},
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
-
|
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ON':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ON':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-BO-KICKEX-INC':{}, # increment to SI injection kicker delay
'TI-SI-KICKIN-ON':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
#'TI-SI-KICKIN-INC':{}, # increment to SI injection kicker delay
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
Change names of timing pvs
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ENABLED':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ENABLED':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-SI-KICKIN-ENABLED':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
'TI-DELAY':{},
'TI-DELAY-INC':{},
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
-
|
<commit_before>
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ON':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ON':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-BO-KICKEX-INC':{}, # increment to SI injection kicker delay
'TI-SI-KICKIN-ON':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
#'TI-SI-KICKIN-INC':{}, # increment to SI injection kicker delay
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
<commit_msg>Change names of timing pvs<commit_after>
import sirius
def get_record_names(family_name = None):
"""Return a dictionary of record names for given subsystem
each entry is another dictionary of model families whose
values are the indices in the pyaccel model of the magnets
that belong to the family. The magnet models ca be segmented,
in which case the value is a python list of lists."""
if family_name == None:
families = ['ti']
record_names_dict = {}
for i in range(len(families)):
record_names_dict.update(get_record_names(families[i]))
return record_names_dict
if family_name.lower() == 'ti':
_dict = {
'TI-CYCLE':{}, # when set starts entire injection cycle
'TI-BO-KICKIN-ENABLED':{}, # whether to pulse BO injection kicker
'TI-BO-KICKIN-DELAY':{}, # when to pulse BO injection kickes
'TI-BO-KICKEX-ENABLED':{}, # whether to pulse BO extraction kicker
'TI-BO-KICKEX-DELAY':{}, # when to pulse BO extraction kicker
'TI-SI-KICKIN-ENABLED':{} , # whether to pulse SI injection kicker
'TI-SI-KICKIN-DELAY':{}, # when to pulse SI injection kicker
'TI-DELAY':{},
'TI-DELAY-INC':{},
}
return _dict
else:
raise Exception('Family name %s not found'%family_name)
-
|
d6c59f9f76b41945fb363f94d65956fbf1a18dac
|
tests/python_tests/test_routines.py
|
tests/python_tests/test_routines.py
|
# TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
|
# TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
def test_dummy():
pass
|
Add dummy test for flake8
|
Add dummy test for flake8
|
Python
|
mit
|
hvy/chainer,niboshi/chainer,chainer/chainer,ktnyt/chainer,wkentaro/chainer,tkerola/chainer,wkentaro/chainer,pfnet/chainer,niboshi/chainer,jnishi/chainer,hvy/chainer,okuta/chainer,keisuke-umezawa/chainer,chainer/chainer,ktnyt/chainer,okuta/chainer,wkentaro/chainer,keisuke-umezawa/chainer,jnishi/chainer,niboshi/chainer,chainer/chainer,okuta/chainer,keisuke-umezawa/chainer,jnishi/chainer,jnishi/chainer,okuta/chainer,wkentaro/chainer,ktnyt/chainer,ktnyt/chainer,keisuke-umezawa/chainer,hvy/chainer,niboshi/chainer,hvy/chainer,chainer/chainer
|
# TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
Add dummy test for flake8
|
# TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
def test_dummy():
pass
|
<commit_before># TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
<commit_msg>Add dummy test for flake8<commit_after>
|
# TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
def test_dummy():
pass
|
# TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
Add dummy test for flake8# TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
def test_dummy():
pass
|
<commit_before># TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
<commit_msg>Add dummy test for flake8<commit_after># TODO(niboshi): Currently this file is just a placeholder for future migration of routine tests from test_array.py.
# In order to do that, we need to somehow share check logics/test parameters with test_array.py.
def test_dummy():
pass
|
d207bf14b30636959e09659607bddcf4e349852b
|
django_migration_linter/sql_analyser/__init__.py
|
django_migration_linter/sql_analyser/__init__.py
|
from .analyser import analyse_sql_statements # noqa
from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
|
from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
from .analyser import analyse_sql_statements # noqa isort:skip
|
Fix import order which was important
|
Fix import order which was important
|
Python
|
apache-2.0
|
3YOURMIND/django-migration-linter
|
from .analyser import analyse_sql_statements # noqa
from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
Fix import order which was important
|
from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
from .analyser import analyse_sql_statements # noqa isort:skip
|
<commit_before>from .analyser import analyse_sql_statements # noqa
from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
<commit_msg>Fix import order which was important<commit_after>
|
from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
from .analyser import analyse_sql_statements # noqa isort:skip
|
from .analyser import analyse_sql_statements # noqa
from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
Fix import order which was importantfrom .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
from .analyser import analyse_sql_statements # noqa isort:skip
|
<commit_before>from .analyser import analyse_sql_statements # noqa
from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
<commit_msg>Fix import order which was important<commit_after>from .base import BaseAnalyser # noqa
from .mysql import MySqlAnalyser # noqa
from .postgresql import PostgresqlAnalyser # noqa
from .sqlite import SqliteAnalyser # noqa
from .analyser import analyse_sql_statements # noqa isort:skip
|
5f113ffd768431991f87cea1f5f804a25a1777d3
|
frappe/patches/v13_0/replace_old_data_import.py
|
frappe/patches/v13_0/replace_old_data_import.py
|
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql(
"""INSERT INTO `tabData Import Legacy` SELECT * FROM `tabData Import`"""
)
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
|
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy')
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
|
Use rename doc instead of manually moving the data
|
fix: Use rename doc instead of manually moving the data
|
Python
|
mit
|
StrellaGroup/frappe,saurabh6790/frappe,mhbu50/frappe,yashodhank/frappe,frappe/frappe,yashodhank/frappe,almeidapaulopt/frappe,yashodhank/frappe,frappe/frappe,mhbu50/frappe,almeidapaulopt/frappe,adityahase/frappe,saurabh6790/frappe,frappe/frappe,adityahase/frappe,mhbu50/frappe,adityahase/frappe,almeidapaulopt/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,adityahase/frappe,StrellaGroup/frappe,saurabh6790/frappe,saurabh6790/frappe,StrellaGroup/frappe
|
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql(
"""INSERT INTO `tabData Import Legacy` SELECT * FROM `tabData Import`"""
)
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
fix: Use rename doc instead of manually moving the data
|
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy')
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
|
<commit_before># Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql(
"""INSERT INTO `tabData Import Legacy` SELECT * FROM `tabData Import`"""
)
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
<commit_msg>fix: Use rename doc instead of manually moving the data<commit_after>
|
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy')
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
|
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql(
"""INSERT INTO `tabData Import Legacy` SELECT * FROM `tabData Import`"""
)
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
fix: Use rename doc instead of manually moving the data# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy')
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
|
<commit_before># Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.db.sql(
"""INSERT INTO `tabData Import Legacy` SELECT * FROM `tabData Import`"""
)
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
<commit_msg>fix: Use rename doc instead of manually moving the data<commit_after># Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import frappe
def execute():
frappe.rename_doc('DocType', 'Data Import', 'Data Import Legacy')
frappe.db.commit()
frappe.db.sql("DROP TABLE IF EXISTS `tabData Import`")
frappe.reload_doc("core", "doctype", "data_import")
frappe.get_doc("DocType", "Data Import").on_update()
|
216294a0ea36c2fbabb43c31ce4fde3a9eee4bf3
|
anchor/models.py
|
anchor/models.py
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.volumes = data.get('volumes')
self.cbs_hosts = data.get('cbs_hosts')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
Update model for CBS host and volume information
|
Update model for CBS host and volume information
|
Python
|
apache-2.0
|
oldarmyc/anchor,oldarmyc/anchor,oldarmyc/anchor
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
Update model for CBS host and volume information
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.volumes = data.get('volumes')
self.cbs_hosts = data.get('cbs_hosts')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
<commit_before># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
<commit_msg>Update model for CBS host and volume information<commit_after>
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.volumes = data.get('volumes')
self.cbs_hosts = data.get('cbs_hosts')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
Update model for CBS host and volume information# Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.volumes = data.get('volumes')
self.cbs_hosts = data.get('cbs_hosts')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
<commit_before># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
<commit_msg>Update model for CBS host and volume information<commit_after># Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from datetime import datetime
from dateutil import tz
from dateutil.relativedelta import relativedelta
UTC = tz.tzutc()
class Region:
def __init__(self, data):
self.name = data.get('name').title()
self.abbreviation = data.get('abbreviation').upper()
self.active = bool(data.get('active'))
class Account:
def __init__(self, data):
self.account_number = data.get('account_number')
self.cache_expiration = self.set_expiration()
self.host_servers = data.get('host_servers')
self.public_zones = data.get('public_zones')
self.region = data.get('region').lower()
self.servers = data.get('servers')
self.volumes = data.get('volumes')
self.cbs_hosts = data.get('cbs_hosts')
self.lookup_type = data.get('lookup_type')
def set_expiration(self):
return datetime.now(UTC) + relativedelta(days=1)
|
115071fc5a8a631addb762e34f0af07e755dad1b
|
hopsutil/tensorboard.py
|
hopsutil/tensorboard.py
|
"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
tb_url = "http://{0}:{1}".format(addr, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)
|
"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
host = socket.gethostname()
tb_url = "http://{0}:{1}".format(host, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)
|
Use hostname instead of ip
|
Use hostname instead of ip
|
Python
|
apache-2.0
|
hopshadoop/hops-util-py,hopshadoop/hops-util-py
|
"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
tb_url = "http://{0}:{1}".format(addr, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)Use hostname instead of ip
|
"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
host = socket.gethostname()
tb_url = "http://{0}:{1}".format(host, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)
|
<commit_before>"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
tb_url = "http://{0}:{1}".format(addr, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)<commit_msg>Use hostname instead of ip<commit_after>
|
"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
host = socket.gethostname()
tb_url = "http://{0}:{1}".format(host, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)
|
"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
tb_url = "http://{0}:{1}".format(addr, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)Use hostname instead of ip"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
host = socket.gethostname()
tb_url = "http://{0}:{1}".format(host, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)
|
<commit_before>"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
tb_url = "http://{0}:{1}".format(addr, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)<commit_msg>Use hostname instead of ip<commit_after>"""
Utility functions to retrieve information about available services and setting up security for the Hops platform.
These utils facilitates development by hiding complexity for programs interacting with Hops services.
"""
import socket
import subprocess
import os
import pydoop.hdfs as pyhdfs
def register(logdir):
#find free port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('',0))
addr, port = s.getsockname()
s.close()
#let tb bind to port
pypath = os.getenv("PYSPARK_PYTHON")
pydir = os.path.dirname(pypath)
subprocess.Popen([pypath, "%s/tensorboard"%pydir, "--logdir=%s"%logdir, "--port=%d"%port, "--debug"])
host = socket.gethostname()
tb_url = "http://{0}:{1}".format(host, port)
#dump tb host:port to hdfs
hops_user = os.environ["USER"];
hops_user_split = hops_user.split("__");
project = hops_user_split[0];
pyhdfs.dump(tb_url, "hdfs:///Projects/" + project + "/Jupyter/.jupyter.tensorboard", user=hops_user)
|
31fb8b576edda4d88685fd45537f68d3f067ae7b
|
source/cytoplasm/errors.py
|
source/cytoplasm/errors.py
|
class ControllerError(StandardError): pass
class InterpreterError(StandardError): pass
|
class CytoplasmError(Exception): pass
class ControllerError(CytoplasmError): pass
class InterpreterError(CytoplasmError): pass
|
Use Exception instead of StandardError
|
Use Exception instead of StandardError
Python 3 doesn't have StandardError...
|
Python
|
mit
|
startling/cytoplasm
|
class ControllerError(StandardError): pass
class InterpreterError(StandardError): pass
Use Exception instead of StandardError
Python 3 doesn't have StandardError...
|
class CytoplasmError(Exception): pass
class ControllerError(CytoplasmError): pass
class InterpreterError(CytoplasmError): pass
|
<commit_before>class ControllerError(StandardError): pass
class InterpreterError(StandardError): pass
<commit_msg>Use Exception instead of StandardError
Python 3 doesn't have StandardError...<commit_after>
|
class CytoplasmError(Exception): pass
class ControllerError(CytoplasmError): pass
class InterpreterError(CytoplasmError): pass
|
class ControllerError(StandardError): pass
class InterpreterError(StandardError): pass
Use Exception instead of StandardError
Python 3 doesn't have StandardError...class CytoplasmError(Exception): pass
class ControllerError(CytoplasmError): pass
class InterpreterError(CytoplasmError): pass
|
<commit_before>class ControllerError(StandardError): pass
class InterpreterError(StandardError): pass
<commit_msg>Use Exception instead of StandardError
Python 3 doesn't have StandardError...<commit_after>class CytoplasmError(Exception): pass
class ControllerError(CytoplasmError): pass
class InterpreterError(CytoplasmError): pass
|
a85c21dc324750c3fa7e96d2d0baf3c45657201e
|
sconsole/static.py
|
sconsole/static.py
|
'''
Holds static data components, like the palette
'''
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write(str(msg))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
|
'''
Holds static data components, like the palette
'''
import pprint
def tree_seed():
return {'jids': [
{'_|-76789876543456787654': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
{'_|-76789876543456787655': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
],
}
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write('{0}\n'.format(pprint.pformat(msg)))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
|
Add convenience function to load in some test data
|
Add convenience function to load in some test data
|
Python
|
apache-2.0
|
saltstack/salt-console
|
'''
Holds static data components, like the palette
'''
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write(str(msg))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
Add convenience function to load in some test data
|
'''
Holds static data components, like the palette
'''
import pprint
def tree_seed():
return {'jids': [
{'_|-76789876543456787654': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
{'_|-76789876543456787655': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
],
}
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write('{0}\n'.format(pprint.pformat(msg)))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
|
<commit_before>'''
Holds static data components, like the palette
'''
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write(str(msg))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
<commit_msg>Add convenience function to load in some test data<commit_after>
|
'''
Holds static data components, like the palette
'''
import pprint
def tree_seed():
return {'jids': [
{'_|-76789876543456787654': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
{'_|-76789876543456787655': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
],
}
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write('{0}\n'.format(pprint.pformat(msg)))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
|
'''
Holds static data components, like the palette
'''
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write(str(msg))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
Add convenience function to load in some test data'''
Holds static data components, like the palette
'''
import pprint
def tree_seed():
return {'jids': [
{'_|-76789876543456787654': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
{'_|-76789876543456787655': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
],
}
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write('{0}\n'.format(pprint.pformat(msg)))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
|
<commit_before>'''
Holds static data components, like the palette
'''
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write(str(msg))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
<commit_msg>Add convenience function to load in some test data<commit_after>'''
Holds static data components, like the palette
'''
import pprint
def tree_seed():
return {'jids': [
{'_|-76789876543456787654': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
{'_|-76789876543456787655': [{'localhost': {'return': True}},
{'otherhost': {'return': True}}],},
],
}
def msg(msg, logfile='console_log.txt'):
'''
Send a message to a logfile, defaults to console_log.txt.
This is useful to replace a print statement since curses does put
a bit of a damper on this
'''
with open(logfile, 'a+') as fp_:
fp_.write('{0}\n'.format(pprint.pformat(msg)))
def get_palette(theme='std'):
'''
Return the preferred palette theme
Themes:
std
The standard theme used by the console
'''
if theme == 'bright':
return [
('banner', 'white', 'dark blue')
]
else:
return [
('banner', 'white', 'dark blue')
]
|
401e60837c13af5a350b1487225a296c2e803069
|
Lib/test/test_dumbdbm.py
|
Lib/test/test_dumbdbm.py
|
#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose, TESTFN as filename
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
Use a saner test filename, to work on Windows.
|
Use a saner test filename, to work on Windows.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
Use a saner test filename, to work on Windows.
|
#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose, TESTFN as filename
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
<commit_before>#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
<commit_msg>Use a saner test filename, to work on Windows.<commit_after>
|
#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose, TESTFN as filename
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
Use a saner test filename, to work on Windows.#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose, TESTFN as filename
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
<commit_before>#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose
filename = '/tmp/delete_me'
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
<commit_msg>Use a saner test filename, to work on Windows.<commit_after>#! /usr/bin/env python
"""Test script for the dumbdbm module
Original by Roger E. Masse
"""
# XXX This test is a disgrace. It doesn't test that it works.
import dumbdbm as dbm
from dumbdbm import error
from test_support import verbose, TESTFN as filename
d = dbm.open(filename, 'c')
d['a'] = 'b'
d['12345678910'] = '019237410982340912840198242'
d.keys()
if d.has_key('a'):
if verbose:
print 'Test dbm keys: ', d.keys()
d.close()
d = dbm.open(filename, 'r')
d.close()
d = dbm.open(filename, 'w')
d.close()
d = dbm.open(filename, 'n')
d.close()
import os
def rm(fn):
try:
os.unlink(fn)
except os.error:
pass
rm(filename + '.dir')
rm(filename + '.dat')
rm(filename + '.bak')
|
37ade5f4ce1feb44cdb7f8de1e373f5085c77a40
|
socrates.py
|
socrates.py
|
from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
|
from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
parser.add_option('-r', '--run', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
if options.run:
import SimpleHTTPServer
import SocketServer
import os
os.chdir('blog/deploy')
PORT = 8000
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
|
Add simple server for testing purposes.
|
Add simple server for testing purposes.
|
Python
|
bsd-3-clause
|
thurloat/socrates,thurloat/socrates
|
from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
Add simple server for testing purposes.
|
from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
parser.add_option('-r', '--run', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
if options.run:
import SimpleHTTPServer
import SocketServer
import os
os.chdir('blog/deploy')
PORT = 8000
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
|
<commit_before>from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
<commit_msg>Add simple server for testing purposes.<commit_after>
|
from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
parser.add_option('-r', '--run', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
if options.run:
import SimpleHTTPServer
import SocketServer
import os
os.chdir('blog/deploy')
PORT = 8000
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
|
from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
Add simple server for testing purposes.from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
parser.add_option('-r', '--run', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
if options.run:
import SimpleHTTPServer
import SocketServer
import os
os.chdir('blog/deploy')
PORT = 8000
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
|
<commit_before>from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
<commit_msg>Add simple server for testing purposes.<commit_after>from socrates.main import main
from socrates.bootstrap import run
from optparse import OptionParser
parser = OptionParser()
parser.add_option('-i', '--init', action='store_true', help='Some help')
parser.add_option('-g', '--generate', action='store_true', help='Some help')
parser.add_option('-r', '--run', action='store_true', help='Some help')
options, args = parser.parse_args()
if options.init:
run()
if options.generate:
main()
if options.run:
import SimpleHTTPServer
import SocketServer
import os
os.chdir('blog/deploy')
PORT = 8000
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
|
cf245e71e770d21db8a48a74f8833d1099157e73
|
txircd/modules/ircv3/multiprefix.py
|
txircd/modules/ircv3/multiprefix.py
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def unload(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "unloading-multi-prefix" in self.ircd.dataCache:
del self.ircd.dataCache["unloading-multi-prefix"]
return
if "cap-add" in self.ircd.functionCache:
self.ircd.functionCache["cap-add"]("multi-prefix")
def unload(self):
self.ircd.dataCache["unloading-multi-prefix"] = True
def fullUnload(self):
del self.ircd.dataCache["unloading-multi-prefix"]
if "cap-del" in self.ircd.functionCache:
self.ircd.functionCache["cap-del"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()
|
Reduce undoings of multi-prefix on users
|
Reduce undoings of multi-prefix on users
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def unload(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()Reduce undoings of multi-prefix on users
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "unloading-multi-prefix" in self.ircd.dataCache:
del self.ircd.dataCache["unloading-multi-prefix"]
return
if "cap-add" in self.ircd.functionCache:
self.ircd.functionCache["cap-add"]("multi-prefix")
def unload(self):
self.ircd.dataCache["unloading-multi-prefix"] = True
def fullUnload(self):
del self.ircd.dataCache["unloading-multi-prefix"]
if "cap-del" in self.ircd.functionCache:
self.ircd.functionCache["cap-del"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()
|
<commit_before>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def unload(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()<commit_msg>Reduce undoings of multi-prefix on users<commit_after>
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "unloading-multi-prefix" in self.ircd.dataCache:
del self.ircd.dataCache["unloading-multi-prefix"]
return
if "cap-add" in self.ircd.functionCache:
self.ircd.functionCache["cap-add"]("multi-prefix")
def unload(self):
self.ircd.dataCache["unloading-multi-prefix"] = True
def fullUnload(self):
del self.ircd.dataCache["unloading-multi-prefix"]
if "cap-del" in self.ircd.functionCache:
self.ircd.functionCache["cap-del"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def unload(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()Reduce undoings of multi-prefix on usersfrom twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "unloading-multi-prefix" in self.ircd.dataCache:
del self.ircd.dataCache["unloading-multi-prefix"]
return
if "cap-add" in self.ircd.functionCache:
self.ircd.functionCache["cap-add"]("multi-prefix")
def unload(self):
self.ircd.dataCache["unloading-multi-prefix"] = True
def fullUnload(self):
del self.ircd.dataCache["unloading-multi-prefix"]
if "cap-del" in self.ircd.functionCache:
self.ircd.functionCache["cap-del"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()
|
<commit_before>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def unload(self):
if "cap-add" in self.ircd.moduleFunctionCache:
self.ircd.moduleFunctionCache["cap-add"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()<commit_msg>Reduce undoings of multi-prefix on users<commit_after>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class MultiPrefix(ModuleData):
implements(IPlugin, IModuleData)
name = "MultiPrefix"
def actions(self):
return [ ("channelstatuses", 2, self.allStatuses),
("capabilitylist", 10, self.addCapability) ]
def load(self):
if "unloading-multi-prefix" in self.ircd.dataCache:
del self.ircd.dataCache["unloading-multi-prefix"]
return
if "cap-add" in self.ircd.functionCache:
self.ircd.functionCache["cap-add"]("multi-prefix")
def unload(self):
self.ircd.dataCache["unloading-multi-prefix"] = True
def fullUnload(self):
del self.ircd.dataCache["unloading-multi-prefix"]
if "cap-del" in self.ircd.functionCache:
self.ircd.functionCache["cap-del"]("multi-prefix")
def addCapability(self, capList):
capList.append("multi-prefix")
def allStatuses(self, channel, user, requestingUser):
if "capabilities" not in requestingUser.cache or "multi-prefix" not in requestingUser.cache["capabilities"]:
return None
if user not in channel.users:
return ""
statusList = []
for status in channel.users[user]["status"]:
statusList.append(self.ircd.channelStatuses[status][0])
return "".join(statusList)
multiPrefix = MultiPrefix()
|
1400a71d9827d76f14e70d4e8310dd20b9b47af4
|
life/life.py
|
life/life.py
|
import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('==============\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
|
import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('=' * boardSize[0] +'\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
|
Make separator line match width of board
|
Make separator line match width of board
|
Python
|
bsd-2-clause
|
bladams/golf
|
import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('==============\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
Make separator line match width of board
|
import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('=' * boardSize[0] +'\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
|
<commit_before>import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('==============\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
<commit_msg>Make separator line match width of board<commit_after>
|
import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('=' * boardSize[0] +'\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
|
import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('==============\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
Make separator line match width of boardimport sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('=' * boardSize[0] +'\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
|
<commit_before>import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('==============\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
<commit_msg>Make separator line match width of board<commit_after>import sys, random, time
boardSize = (10,10)
while True: foo, bar, baz, globals()['board'] = None if globals().get('board') is None else [
(
[sys.stdout.write('X' if cell else ' ') for cell in row],
sys.stdout.write('\n')
) for row in board
], time.sleep(1), sys.stdout.write('=' * boardSize[0] +'\n'), [
[random.random() < 0.5 for i in range(boardSize[0])] for j in range(boardSize[1])
] if 'board' not in globals() else [
map(
lambda z: (z[1] in (2,3) and board[y][z[0]]) or z[1]==3,
[
(
x,
sum(
[
int(
y0 in range(len(board)) and x0 in range(len(board[y0])) and board[y0][x0]
) for x0,y0 in (
(x - 1, y - 1),
(x, y -1),
(x + 1, y - 1),
(x - 1, y),
(x + 1, y),
(x - 1, y + 1),
(x, y + 1),
(x + 1, y + 1)
)
]
)
) for x in range(len(board[y]))]
) for y in range(len(board))
]
|
12f835d9060decfc675c81f7a1499b373b78f4cc
|
TrevorNet/tests/test_idx.py
|
TrevorNet/tests/test_idx.py
|
from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
_somebytes = b'\x00\x00\x0C\x02' + b'\x01\x02\x03\x04'
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
|
from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
def _get_somebytes():
header = b'\x00\x00\x0C\x02'
dimensionsizes = b'\x00\x00\x00\x02' + b'\x00\x00\x00\x02'
data = b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02'
data += b'\x00\x00\x00\x03' + b'\x00\x00\x00\x04'
return header + dimensionsizes + data
_somebytes = _get_somebytes()
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
print(data, _somebytes)
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
|
Fix issue where idx test uses wrong bytes object
|
Fix issue where idx test uses wrong bytes object
Forgot to include the sizes of each dimension
|
Python
|
mit
|
tmerr/trevornet
|
from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
_somebytes = b'\x00\x00\x0C\x02' + b'\x01\x02\x03\x04'
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
Fix issue where idx test uses wrong bytes object
Forgot to include the sizes of each dimension
|
from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
def _get_somebytes():
header = b'\x00\x00\x0C\x02'
dimensionsizes = b'\x00\x00\x00\x02' + b'\x00\x00\x00\x02'
data = b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02'
data += b'\x00\x00\x00\x03' + b'\x00\x00\x00\x04'
return header + dimensionsizes + data
_somebytes = _get_somebytes()
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
print(data, _somebytes)
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
|
<commit_before>from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
_somebytes = b'\x00\x00\x0C\x02' + b'\x01\x02\x03\x04'
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
<commit_msg>Fix issue where idx test uses wrong bytes object
Forgot to include the sizes of each dimension<commit_after>
|
from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
def _get_somebytes():
header = b'\x00\x00\x0C\x02'
dimensionsizes = b'\x00\x00\x00\x02' + b'\x00\x00\x00\x02'
data = b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02'
data += b'\x00\x00\x00\x03' + b'\x00\x00\x00\x04'
return header + dimensionsizes + data
_somebytes = _get_somebytes()
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
print(data, _somebytes)
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
|
from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
_somebytes = b'\x00\x00\x0C\x02' + b'\x01\x02\x03\x04'
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
Fix issue where idx test uses wrong bytes object
Forgot to include the sizes of each dimensionfrom .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
def _get_somebytes():
header = b'\x00\x00\x0C\x02'
dimensionsizes = b'\x00\x00\x00\x02' + b'\x00\x00\x00\x02'
data = b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02'
data += b'\x00\x00\x00\x03' + b'\x00\x00\x00\x04'
return header + dimensionsizes + data
_somebytes = _get_somebytes()
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
print(data, _somebytes)
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
|
<commit_before>from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
_somebytes = b'\x00\x00\x0C\x02' + b'\x01\x02\x03\x04'
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
<commit_msg>Fix issue where idx test uses wrong bytes object
Forgot to include the sizes of each dimension<commit_after>from .. import idx
import os
def test__count_dimensions():
yield check__count_dimensions, 9, 0
yield check__count_dimensions, [1, 2], 1
yield check__count_dimensions, [[1, 2], [3, 6, 2]], 2
yield check__count_dimensions, [[[1,2], [2]]], 3
def check__count_dimensions(lst, i):
assert idx._count_dimensions(lst) == i
# these two are equivalent according to the format on http://yann.lecun.com/exdb/mnist/
_somelist = [[1, 2], [3, 4]]
def _get_somebytes():
header = b'\x00\x00\x0C\x02'
dimensionsizes = b'\x00\x00\x00\x02' + b'\x00\x00\x00\x02'
data = b'\x00\x00\x00\x01' + b'\x00\x00\x00\x02'
data += b'\x00\x00\x00\x03' + b'\x00\x00\x00\x04'
return header + dimensionsizes + data
_somebytes = _get_somebytes()
def test_list_to_idx():
data = idx.list_to_idx(_somelist, 'i')
print(data, _somebytes)
assert data == _somebytes
def test_idx_to_list():
lst = idx.idx_to_list(_somebytes)
assert lst == _somelist
|
f590080fc4d431b333f73ad548a50bc24d4fcf5b
|
fuzzer/main.py
|
fuzzer/main.py
|
import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(t)
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
|
import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(bytes(t, 'ascii'))
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
|
Send char string instead of widechar string
|
Send char string instead of widechar string
|
Python
|
apache-2.0
|
jaybosamiya/fuzzing-numpy,jaybosamiya/fuzzing-numpy,jaybosamiya/fuzzing-numpy
|
import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(t)
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
Send char string instead of widechar string
|
import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(bytes(t, 'ascii'))
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
|
<commit_before>import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(t)
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
<commit_msg>Send char string instead of widechar string<commit_after>
|
import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(bytes(t, 'ascii'))
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
|
import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(t)
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
Send char string instead of widechar stringimport generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(bytes(t, 'ascii'))
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
|
<commit_before>import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(t)
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
<commit_msg>Send char string instead of widechar string<commit_after>import generator
from ctypes import CDLL
import numpy as np
# Initializes the harness and sets it up for work
harness = CDLL("harness/harness.so")
while True:
t = generator.generate()
harness.register_testcase(bytes(t, 'ascii'))
try:
exec(t, {'np':np})
except:
# If the exec fails, then we should not store
continue
generator.register(t)
|
bc22dbd750228c546bdf47b26957154448bb2c5b
|
atrium/api/bleachconfig.py
|
atrium/api/bleachconfig.py
|
import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']
|
import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img', 'br'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']
|
Add line break to allowed HTML tags
|
Add line break to allowed HTML tags
|
Python
|
apache-2.0
|
hugoatease/atrium,hugoatease/atrium,hugoatease/atrium
|
import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']Add line break to allowed HTML tags
|
import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img', 'br'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']
|
<commit_before>import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']<commit_msg>Add line break to allowed HTML tags<commit_after>
|
import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img', 'br'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']
|
import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']Add line break to allowed HTML tagsimport bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img', 'br'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']
|
<commit_before>import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']<commit_msg>Add line break to allowed HTML tags<commit_after>import bleach
ALLOWED_TAGS = bleach.ALLOWED_TAGS + [
'div', 'span', 's', 'u', 'img', 'br'
]
ALLOWED_STYLES = bleach.ALLOWED_STYLES + [
'font-weight', 'font-family', 'font-size'
]
ALLOWED_ATTRIBUTES = bleach.ALLOWED_ATTRIBUTES
ALLOWED_ATTRIBUTES['*'] = ['style']
|
627729380b8fbd6d1b4e4eec0362418dbf698d55
|
libs/qpanel/upgrader.py
|
libs/qpanel/upgrader.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://raw.githubusercontent.com/roramirez/qpanel' + \
'/%s/VERSION' % BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
Change url to get stable version number
|
Change url to get stable version number
|
Python
|
mit
|
roramirez/qpanel,roramirez/qpanel,skazancev/qpanel,skazancev/qpanel,skazancev/qpanel,roramirez/qpanel,roramirez/qpanel,skazancev/qpanel
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://raw.githubusercontent.com/roramirez/qpanel' + \
'/%s/VERSION' % BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
Change url to get stable version number
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://raw.githubusercontent.com/roramirez/qpanel' + \
'/%s/VERSION' % BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
<commit_msg>Change url to get stable version number<commit_after>
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://raw.githubusercontent.com/roramirez/qpanel' + \
'/%s/VERSION' % BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
Change url to get stable version number# -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
<commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://raw.githubusercontent.com/roramirez/qpanel' + \
'/%s/VERSION' % BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
<commit_msg>Change url to get stable version number<commit_after># -*- coding: utf-8 -*-
#
# Copyright (C) 2015-2016 Rodrigo Ramírez Norambuena <a@rodrigoramirez.com>
#
from urllib2 import Request, urlopen
from distutils.version import LooseVersion
BRANCH = 'stable'
REPO = 'git@github.com:roramirez/qpanel.git'
URL_STABLE_VERSION = 'https://rodrigoramirez.com/qpanel/version/' + BRANCH
def require_upgrade():
a = LooseVersion(get_current_version())
b = LooseVersion(get_stable_version())
if a < b:
return True
return False
# InmplementME
def last_check_update():
return True
def get_current_version():
current_version = open('VERSION')
return __first_line(current_version.read())
def get_stable_version():
stable_version = __get_data_url(URL_STABLE_VERSION)
return __first_line(stable_version)
def __get_data_url(url):
req = Request(url)
try:
response = urlopen(req)
return response.read()
except:
return None
def __first_line(content):
tmp = ''
if content is not None:
tmp = content.split('\n')
if len(tmp) > 1:
return tmp[0]
return tmp
|
798a716cb6c3acd6e636d3b9cab755950ead5539
|
Seeder/voting/signals.py
|
Seeder/voting/signals.py
|
# pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
    """Open the initial voting round whenever a new Source is created."""
    # Ignore plain updates -- only a fresh Source gets a round.
    if not created:
        return
    VotingRound(source=instance).save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
    """
    Edits Source according to decision made in voting round.
    If source already has valid contract then we can switch directly
    to running state.
    """
    # React only to updates: the round is created separately (see
    # create_voting_round) and carries no decision yet at that point.
    if not created:
        source = instance.source
        if instance.state == constants.VOTE_APPROVE:
            if source.contract_set.valid():
                # A valid contract already exists -> go straight to running
                # and skip the generic vote-to-state mapping below.
                source.state = source_constants.STATE_RUNNING
                source.save()
                return
            else:
                # No valid contract yet: draft one for this publisher.
                # NOTE(review): Contract(source=source) assumes Contract has
                # a `source` field -- confirm against the model; sources are
                # also attached via the `sources` M2M two lines below.
                contract = Contract(source=source)
                contract.publisher = source.publisher
                contract.save()
                contract.sources.add(source)
        # Map the vote outcome onto the corresponding source state.
        source.state = constants.VOTE_TO_SOURCE[instance.state]
        source.save()
|
# pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract()
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
|
Fix process_voting_round to reflect contract model
|
Fix process_voting_round to reflect contract model
|
Python
|
mit
|
WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder,WebArchivCZ/Seeder
|
# pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract(source=source)
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
Fix process_voting_round to reflect contract model
|
# pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract()
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
|
<commit_before># pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract(source=source)
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
<commit_msg>Fix process_voting_round to reflect contract model<commit_after>
|
# pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract()
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
|
# pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract(source=source)
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
Fix process_voting_round to reflect contract model# pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract()
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
|
<commit_before># pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract(source=source)
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
<commit_msg>Fix process_voting_round to reflect contract model<commit_after># pylint: disable=W0613
from django.dispatch import receiver
from django.db.models.signals import post_save
from voting import constants
from source.models import Source
from voting.models import VotingRound
from source import constants as source_constants
from contracts.models import Contract
@receiver(signal=post_save, sender=Source)
def create_voting_round(instance, created, **kwargs):
"""
Creates a voting round after new Source is created.
"""
if created:
voting_round = VotingRound(source=instance)
voting_round.save()
@receiver(signal=post_save, sender=VotingRound)
def process_voting_round(instance, created, **kwargs):
"""
Edits Source according to decision made in voting round.
If source already has valid contract then we can switch directly
to running state.
"""
if not created:
source = instance.source
if instance.state == constants.VOTE_APPROVE:
if source.contract_set.valid():
source.state = source_constants.STATE_RUNNING
source.save()
return
else:
contract = Contract()
contract.publisher = source.publisher
contract.save()
contract.sources.add(source)
source.state = constants.VOTE_TO_SOURCE[instance.state]
source.save()
|
856207c8399d94e99a6f2ffb1e10befecb6150cf
|
src/generate-jobs/calculate_quad_key.py
|
src/generate-jobs/calculate_quad_key.py
|
#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import system
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
    """
    Converts XYZ tile coordinates to Microsoft QuadTree
    http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
    """
    digits = []
    # Walk the bits from the most significant (zoom level) down to bit 0;
    # each level contributes one base-4 digit: x-bit -> +1, y-bit -> +2.
    for level in range(zoom - 1, -1, -1):
        bit = 1 << level
        digit = (1 if tx & bit else 0) + (2 if ty & bit else 0)
        digits.append(str(digit))
    return ''.join(digits)
if __name__ == '__main__':
    args = docopt(__doc__, version='0.1')
    # Bug fix: the file imported the nonexistent module `system`;
    # the intended target is the real standard stream sys.stdout.
    import sys
    # Tab delimiter: the docstring promises TSV output, not CSV.
    writer = csv.writer(sys.stdout, delimiter='\t')
    with open(args['<list_file>'], "r") as file_handle:
        for line in file_handle:
            z, x, y = line.split('/')
            writer.writerow([
                # strip() drops the trailing newline so each record
                # stays on a single output row.
                line.strip(),
                quad_tree(int(x), int(y), int(z))]
            )
|
#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import sys
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line.strip(),
quad_tree(int(x), int(y), int(z))]
)
|
Fix line endings in CSV and stdout typo
|
Fix line endings in CSV and stdout typo
|
Python
|
mit
|
geometalab/osm2vectortiles,geometalab/osm2vectortiles,osm2vectortiles/osm2vectortiles,osm2vectortiles/osm2vectortiles
|
#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import system
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(system.out)
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line,
quad_tree(int(x), int(y), int(z))]
)
Fix line endings in CSV and stdout typo
|
#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import sys
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line.strip(),
quad_tree(int(x), int(y), int(z))]
)
|
<commit_before>#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import system
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(system.out)
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line,
quad_tree(int(x), int(y), int(z))]
)
<commit_msg>Fix line endings in CSV and stdout typo<commit_after>
|
#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import sys
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line.strip(),
quad_tree(int(x), int(y), int(z))]
)
|
#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import system
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(system.out)
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line,
quad_tree(int(x), int(y), int(z))]
)
Fix line endings in CSV and stdout typo#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import sys
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line.strip(),
quad_tree(int(x), int(y), int(z))]
)
|
<commit_before>#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import system
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(system.out)
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line,
quad_tree(int(x), int(y), int(z))]
)
<commit_msg>Fix line endings in CSV and stdout typo<commit_after>#!/usr/bin/env python
"""Calculate QuadKey for TSV file and append it as column
Usage:
calculate_quad_key.py <list_file>
calculate_quad_key.py (-h | --help)
calculate_quad_key.py --version
Options:
-h --help Show this screen.
--version Show version.
"""
import sys
import csv
from docopt import docopt
def quad_tree(tx, ty, zoom):
"""
Converts XYZ tile coordinates to Microsoft QuadTree
http://www.maptiler.org/google-maps-coordinates-tile-bounds-projection/
"""
quad_key = ''
for i in range(zoom, 0, -1):
digit = 0
mask = 1 << (i-1)
if (tx & mask) != 0:
digit += 1
if (ty & mask) != 0:
digit += 2
quad_key += str(digit)
return quad_key
if __name__ == '__main__':
args = docopt(__doc__, version='0.1')
writer = csv.writer(sys.stdout, delimiter='\t')
with open(args['<list_file>'], "r") as file_handle:
for line in file_handle:
z, x, y = line.split('/')
writer.writerow([
line.strip(),
quad_tree(int(x), int(y), int(z))]
)
|
74ce850d7db766328e2931f5a8119b7e2e5b1ded
|
examples/basic_example.py
|
examples/basic_example.py
|
#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
if __name__ == "__main__":
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
|
#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
def main():
    """Build and run a query for every (relation, value) pair attached to
    resources named "Barack Obama", then print the query and its results.
    """
    # Construct the query variables (the explicit names are optional).
    obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
    # Construct the query itself, selecting the relation and value variables.
    q = Query(result_vars=[relation, value])
    # Match everyone whose FOAF name is the English literal "Barack Obama".
    q.add(subject=obama, relationship=FOAF.name,
          object=Literal("Barack Obama", lang="en"))
    # Fetch every relation these people have to any object.
    q.add(subject=obama, relationship=relation, object=value)
    # Limit the results to the first 50 distinct pairs.
    q.result_limit = 50
    print(str(q))
    # NOTE(review): pprint() prints and returns None, so this also prints
    # a spurious "None" line -- probably meant pprint(q.execute()) alone.
    print(pprint(q.execute()))


if __name__ == "__main__":
    main()
|
Switch to main method in examples
|
Switch to main method in examples
|
Python
|
mit
|
ALSchwalm/sparqllib
|
#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
if __name__ == "__main__":
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
Switch to main method in examples
|
#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
def main():
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
if __name__ == "__main__":
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
<commit_msg>Switch to main method in examples<commit_after>
|
#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
def main():
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
if __name__ == "__main__":
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
Switch to main method in examples#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
def main():
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
if __name__ == "__main__":
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
<commit_msg>Switch to main method in examples<commit_after>#!/usr/bin/env python
'''
A simple script using sparqllib and rdflib to retrieve a JSON representation
of some information about Barack Obama from dbpedia.
'''
from sparqllib import Query
from rdflib import BNode, Literal
from rdflib.namespace import FOAF
from pprint import pprint
def main():
# construct the query variables (the explict names are optional)
obama, relation, value = BNode("Obama"), BNode("relation"), BNode("value")
# construct the query itself, selecting the relation and value variables
q = Query(result_vars=[relation, value])
# get everyone with the name Barack Obama
q.add(subject=obama, relationship=FOAF.name,
object=Literal("Barack Obama", lang="en"))
# get every relation these people have to any object
q.add(subject=obama, relationship=relation, object=value)
# limit the results to the first 50 distince pairs
q.result_limit = 50
print(str(q))
print(pprint(q.execute()))
if __name__ == "__main__":
main()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.