commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c88efde14ea79419a69a3459b5ba9ba19332fffd
|
python/algorithms/sorting/quicksort.py
|
python/algorithms/sorting/quicksort.py
|
import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
if len(items) < 2:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
|
import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if len(items) < 2:
return
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
|
Move redundant check to first point of contact
|
Move redundant check to first point of contact
|
Python
|
mit
|
vilisimo/ads,vilisimo/ads
|
import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
if len(items) < 2:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
Move redundant check to first point of contact
|
import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if len(items) < 2:
return
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
|
<commit_before>import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
if len(items) < 2:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
<commit_msg>Move redundant check to first point of contact<commit_after>
|
import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if len(items) < 2:
return
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
|
import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
if len(items) < 2:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
Move redundant check to first point of contactimport random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if len(items) < 2:
return
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
|
<commit_before>import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
if len(items) < 2:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
<commit_msg>Move redundant check to first point of contact<commit_after>import random
def sort(items):
if items is None:
raise TypeError("Collection cannot be of type None")
if len(items) < 2:
return items
pivot = random.randint(0, len(items) - 1)
greater = []
less = []
for index in range(0, len(items)):
if index == pivot:
continue
if items[index] > items[pivot]:
greater.append(items[index])
else:
less.append(items[index])
return sort(less) + [items[pivot]] + sort(greater)
def quicksort(items):
""" In-place quicksort with random pivot """
if len(items) < 2:
return
if items is None:
raise TypeError("Collection cannot be of type None")
_quicksort(items, 0, len(items) - 1)
def _quicksort(items, first, last):
if first >= last:
return
pivot = items[random.randint(first, last)]
head, tail = first, last
while head <= tail:
while items[head] < pivot:
head += 1
while items[tail] > pivot:
tail -= 1
if head <= tail:
items[head], items[tail] = items[tail], items[head]
head += 1
tail -= 1
_quicksort(items, first, tail)
_quicksort(items, head, last)
|
c90a934366d81e759094f94469774abcf2e8f098
|
qllr/blueprints/submission/__init__.py
|
qllr/blueprints/submission/__init__.py
|
# -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if result["ok"] == False:
raise HTTPException(422, result["message"])
else:
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
else:
return JSONResponse(result)
|
# -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
return JSONResponse(result)
|
Remove result checking in http_stats_submit, as submit_match raises exception on fail
|
Remove result checking in http_stats_submit, as submit_match raises exception on fail
|
Python
|
agpl-3.0
|
em92/quakelive-local-ratings,em92/pickup-rating,em92/quakelive-local-ratings,em92/pickup-rating,em92/quakelive-local-ratings,em92/quakelive-local-ratings,em92/pickup-rating,em92/quakelive-local-ratings
|
# -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if result["ok"] == False:
raise HTTPException(422, result["message"])
else:
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
else:
return JSONResponse(result)
Remove result checking in http_stats_submit, as submit_match raises exception on fail
|
# -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
return JSONResponse(result)
|
<commit_before># -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if result["ok"] == False:
raise HTTPException(422, result["message"])
else:
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
else:
return JSONResponse(result)
<commit_msg>Remove result checking in http_stats_submit, as submit_match raises exception on fail<commit_after>
|
# -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
return JSONResponse(result)
|
# -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if result["ok"] == False:
raise HTTPException(422, result["message"])
else:
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
else:
return JSONResponse(result)
Remove result checking in http_stats_submit, as submit_match raises exception on fail# -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
return JSONResponse(result)
|
<commit_before># -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if result["ok"] == False:
raise HTTPException(422, result["message"])
else:
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
else:
return JSONResponse(result)
<commit_msg>Remove result checking in http_stats_submit, as submit_match raises exception on fail<commit_after># -*- coding: utf-8 -*-
from starlette.exceptions import HTTPException
from starlette.requests import Request
from starlette.responses import JSONResponse
from qllr.app import App
from qllr.settings import RUN_POST_PROCESS
from qllr.submission import submit_match # TODO: перенеси в этот блупринт
bp = App()
bp.json_only_mode = True
@bp.route("/submit", methods=["POST"])
async def http_stats_submit(request: Request):
# https://github.com/PredatH0r/XonStat/blob/cfeae1b0c35c48a9f14afa98717c39aa100cde59/feeder/feeder.node.js#L989
if request.headers.get("X-D0-Blind-Id-Detached-Signature") != "dummy":
raise HTTPException(403, "signature header invalid or not found")
if request.client.host not in [
"::ffff:127.0.0.1",
"::1",
"127.0.0.1",
"testclient",
]:
raise HTTPException(403, "non-loopback requests are not allowed")
match_report = await request.body()
result = await submit_match(match_report.decode("utf-8"))
if RUN_POST_PROCESS is False:
raise HTTPException(202, result["message"])
return JSONResponse(result)
|
f9c654a60501ef734de178e7e2e7e89955eb39e0
|
jesusmtnez/python/koans/koans/about_list_assignments.py
|
jesusmtnez/python/koans/koans/about_list_assignments.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual('Sir', title)
self.assertEqual(['Ricky', 'Bobby'], first_names)
self.assertEqual('Worthington', last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(['Willie', 'Rae'], first_name)
self.assertEqual('Johnson', last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual('Rob', first_name)
self.assertEqual('Roy', last_name)
|
Complete 'About Lists Assignments' koans
|
[Python] Complete 'About Lists Assignments' koans
|
Python
|
mit
|
JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
[Python] Complete 'About Lists Assignments' koans
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual('Sir', title)
self.assertEqual(['Ricky', 'Bobby'], first_names)
self.assertEqual('Worthington', last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(['Willie', 'Rae'], first_name)
self.assertEqual('Johnson', last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual('Rob', first_name)
self.assertEqual('Roy', last_name)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
<commit_msg>[Python] Complete 'About Lists Assignments' koans<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual('Sir', title)
self.assertEqual(['Ricky', 'Bobby'], first_names)
self.assertEqual('Worthington', last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(['Willie', 'Rae'], first_name)
self.assertEqual('Johnson', last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual('Rob', first_name)
self.assertEqual('Roy', last_name)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
[Python] Complete 'About Lists Assignments' koans#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual('Sir', title)
self.assertEqual(['Ricky', 'Bobby'], first_names)
self.assertEqual('Worthington', last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(['Willie', 'Rae'], first_name)
self.assertEqual('Johnson', last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual('Rob', first_name)
self.assertEqual('Roy', last_name)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(__, names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual(__, title)
self.assertEqual(__, first_names)
self.assertEqual(__, last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual(__, first_name)
self.assertEqual(__, last_name)
<commit_msg>[Python] Complete 'About Lists Assignments' koans<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based on AboutArrayAssignments in the Ruby Koans
#
from runner.koan import *
class AboutListAssignments(Koan):
def test_non_parallel_assignment(self):
names = ["John", "Smith"]
self.assertEqual(["John", "Smith"], names)
def test_parallel_assignments(self):
first_name, last_name = ["John", "Smith"]
self.assertEqual("John", first_name)
self.assertEqual("Smith", last_name)
def test_parallel_assignments_with_extra_values(self):
title, *first_names, last_name = ["Sir", "Ricky", "Bobby", "Worthington"]
self.assertEqual('Sir', title)
self.assertEqual(['Ricky', 'Bobby'], first_names)
self.assertEqual('Worthington', last_name)
def test_parallel_assignments_with_sublists(self):
first_name, last_name = [["Willie", "Rae"], "Johnson"]
self.assertEqual(['Willie', 'Rae'], first_name)
self.assertEqual('Johnson', last_name)
def test_swapping_with_parallel_assignment(self):
first_name = "Roy"
last_name = "Rob"
first_name, last_name = last_name, first_name
self.assertEqual('Rob', first_name)
self.assertEqual('Roy', last_name)
|
7d1c3ca61fb11aae181fb15d4ab825dfe9c2e710
|
runtime/__init__.py
|
runtime/__init__.py
|
import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
|
import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
, '...': Ellipsis
})
|
Add support for ... (`Ellipsis`).
|
Add support for ... (`Ellipsis`).
Note that it is still considered an operator, so stuff like `a ... b` means "call ... with a and b".
|
Python
|
mit
|
pyos/dg
|
import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
Add support for ... (`Ellipsis`).
Note that it is still considered an operator, so stuff like `a ... b` means "call ... with a and b".
|
import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
, '...': Ellipsis
})
|
<commit_before>import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
<commit_msg>Add support for ... (`Ellipsis`).
Note that it is still considered an operator, so stuff like `a ... b` means "call ... with a and b".<commit_after>
|
import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
, '...': Ellipsis
})
|
import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
Add support for ... (`Ellipsis`).
Note that it is still considered an operator, so stuff like `a ... b` means "call ... with a and b".import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
, '...': Ellipsis
})
|
<commit_before>import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
})
<commit_msg>Add support for ... (`Ellipsis`).
Note that it is still considered an operator, so stuff like `a ... b` means "call ... with a and b".<commit_after>import builtins
import operator
import functools
import importlib
# Choose a function based on the number of arguments.
varary = lambda *fs: lambda *xs: fs[len(xs) - 1](*xs)
builtins.__dict__.update({
# Runtime counterparts of some stuff in `Compiler.builtins`.
'$': lambda f, *xs: f(*xs)
, ':': lambda f, *xs: f(*xs)
, ',': lambda a, *xs: (a,) + xs
, '<': operator.lt
, '<=': operator.le
, '==': operator.eq
, '!=': operator.ne
, '>': operator.gt
, '>=': operator.ge
, 'is': operator.is_
, 'in': lambda a, b: a in b
, 'not': operator.not_
, '~': operator.invert
, '+': varary(operator.pos, operator.add)
, '-': varary(operator.neg, operator.sub)
, '*': operator.mul
, '**': operator.pow
, '/': operator.truediv
, '//': operator.floordiv
, '%': operator.mod
, '!!': operator.getitem
, '&': operator.and_
, '^': operator.xor
, '|': operator.or_
, '<<': operator.lshift
, '>>': operator.rshift
# Useful stuff.
, 'import': importlib.import_module
, 'foldl': functools.reduce
, '~:': functools.partial
, '...': Ellipsis
})
|
ad16e07cce92c0ed23e5e82c60a00f04dabce2a3
|
rna-transcription/rna_transcription.py
|
rna-transcription/rna_transcription.py
|
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
try:
return "".join([TRANS[n] for n in dna])
except KeyError:
return ""
# Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
Add an exception based version
|
Add an exception based version
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
Add an exception based version
|
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
try:
return "".join([TRANS[n] for n in dna])
except KeyError:
return ""
# Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
<commit_before>DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
<commit_msg>Add an exception based version<commit_after>
|
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
try:
return "".join([TRANS[n] for n in dna])
except KeyError:
return ""
# Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
Add an exception based versionTRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
try:
return "".join([TRANS[n] for n in dna])
except KeyError:
return ""
# Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
<commit_before>DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
<commit_msg>Add an exception based version<commit_after>TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna(dna):
try:
return "".join([TRANS[n] for n in dna])
except KeyError:
return ""
# Old version: it's slightly slower for valid DNA, but slightly faster for invalid DNA
DNA = {"A", "C", "T", "G"}
TRANS = {"G": "C", "C":"G", "T":"A", "A":"U"}
def to_rna_old(dna):
# Check validity - `difference` returns elements in dna not in DNA
if set(dna).difference(DNA):
return ""
return "".join([TRANS[n] for n in dna])
|
81aa35961ba9552701eecbdb4d8e91448835aba0
|
django_autologin/utils.py
|
django_autologin/utils.py
|
import urllib
import urlparse
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
|
import urllib
import urlparse
from django.conf import settings
from django.contrib import auth
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
def login(request, user):
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
|
Make login a utility so it can be re-used elsewhere.
|
Make login a utility so it can be re-used elsewhere.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>
|
Python
|
bsd-3-clause
|
playfire/django-autologin
|
import urllib
import urlparse
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
Make login a utility so it can be re-used elsewhere.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>
|
import urllib
import urlparse
from django.conf import settings
from django.contrib import auth
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
def login(request, user):
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
|
<commit_before>import urllib
import urlparse
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
<commit_msg>Make login a utility so it can be re-used elsewhere.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com><commit_after>
|
import urllib
import urlparse
from django.conf import settings
from django.contrib import auth
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
def login(request, user):
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
|
import urllib
import urlparse
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
Make login a utility so it can be re-used elsewhere.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com>import urllib
import urlparse
from django.conf import settings
from django.contrib import auth
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
def login(request, user):
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
|
<commit_before>import urllib
import urlparse
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
<commit_msg>Make login a utility so it can be re-used elsewhere.
Signed-off-by: Chris Lamb <711c73f64afdce07b7e38039a96d2224209e9a6c@playfire.com><commit_after>import urllib
import urlparse
from django.conf import settings
from django.contrib import auth
from . import app_settings
def strip_token(url):
bits = urlparse.urlparse(url)
original_query = urlparse.parse_qsl(bits.query)
query = {}
for k, v in original_query:
if k != app_settings.KEY:
query[k] = v
query = urllib.urlencode(query)
return urlparse.urlunparse(
(bits[0], bits[1], bits[2], bits[3], query, bits[5]),
)
def login(request, user):
user.backend = settings.AUTHENTICATION_BACKENDS[0]
auth.login(request, user)
|
ac30d4e6434c6c8bbcb949465a3e314088b3fc12
|
jsonfield/utils.py
|
jsonfield/utils.py
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
Revert changes: freezegun has been updated.
|
Revert changes: freezegun has been updated.
|
Python
|
bsd-3-clause
|
SideStudios/django-jsonfield
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
Revert changes: freezegun has been updated.
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
<commit_before>import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
<commit_msg>Revert changes: freezegun has been updated.<commit_after>
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
Revert changes: freezegun has been updated.import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
<commit_before>import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
DATETIME = (datetime.datetime,)
DATE = (datetime.date,)
TIME = (datetime.time,)
try:
import freezegun.api
except ImportError:
pass
else:
DATETIME += (freezegun.api.FakeDatetime,)
DATE += (freezegun.api.FakeDate,)
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, DATETIME):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, DATETIME):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, DATE):
return o.strftime("%Y-%m-%d")
if isinstance(o, TIME):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
<commit_msg>Revert changes: freezegun has been updated.<commit_after>import datetime
from decimal import Decimal
from django.core.serializers.json import DjangoJSONEncoder
class TZAwareJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if isinstance(obj, datetime.datetime):
return obj.strftime("%Y-%m-%d %H:%M:%S%z")
return super(TZAwareJSONEncoder, self).default(obj)
def default(o):
if hasattr(o, 'to_json'):
return o.to_json()
if isinstance(o, Decimal):
return str(o)
if isinstance(o, datetime.datetime):
if o.tzinfo:
return o.strftime('%Y-%m-%dT%H:%M:%S%z')
return o.strftime("%Y-%m-%dT%H:%M:%S")
if isinstance(o, datetime.date):
return o.strftime("%Y-%m-%d")
if isinstance(o, datetime.time):
if o.tzinfo:
return o.strftime('%H:%M:%S%z')
return o.strftime("%H:%M:%S")
raise TypeError(repr(o) + " is not JSON serializable")
|
a0c0499c3da95e53e99d6386f7970079a2669141
|
app/twitter/views.py
|
app/twitter/views.py
|
from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
text = []
for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
else:
return render_template('projects/twitter.html')
|
from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
try:
topic = request.form['topic'].strip()
if topic == '':
return render_template('projects/twitter.html', message='Please enter a valid topic')
text = []
for tweet in tweepy.Cursor(api.search, topic, lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
except Exception:
return render_template('projects/twitter.html', message='Something went wrong. Please try again.')
else:
return render_template('projects/twitter.html')
|
Add exception handling in twitter view
|
Add exception handling in twitter view
|
Python
|
mit
|
griimick/feature-mlsite,griimick/feature-mlsite,griimick/feature-mlsite
|
from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
text = []
for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
else:
return render_template('projects/twitter.html')
Add exception handling in twitter view
|
from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
try:
topic = request.form['topic'].strip()
if topic == '':
return render_template('projects/twitter.html', message='Please enter a valid topic')
text = []
for tweet in tweepy.Cursor(api.search, topic, lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
except Exception:
return render_template('projects/twitter.html', message='Something went wrong. Please try again.')
else:
return render_template('projects/twitter.html')
|
<commit_before>from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
text = []
for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
else:
return render_template('projects/twitter.html')
<commit_msg>Add exception handling in twitter view<commit_after>
|
from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
try:
topic = request.form['topic'].strip()
if topic == '':
return render_template('projects/twitter.html', message='Please enter a valid topic')
text = []
for tweet in tweepy.Cursor(api.search, topic, lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
except Exception:
return render_template('projects/twitter.html', message='Something went wrong. Please try again.')
else:
return render_template('projects/twitter.html')
|
from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
text = []
for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
else:
return render_template('projects/twitter.html')
Add exception handling in twitter viewfrom flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
try:
topic = request.form['topic'].strip()
if topic == '':
return render_template('projects/twitter.html', message='Please enter a valid topic')
text = []
for tweet in tweepy.Cursor(api.search, topic, lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
except Exception:
return render_template('projects/twitter.html', message='Something went wrong. Please try again.')
else:
return render_template('projects/twitter.html')
|
<commit_before>from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
text = []
for tweet in tweepy.Cursor(api.search, request.form['topic'], lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
else:
return render_template('projects/twitter.html')
<commit_msg>Add exception handling in twitter view<commit_after>from flask import Blueprint, request, render_template
from ..load import processing_results, api
import string
import tweepy
twitter_mod = Blueprint('twitter', __name__, template_folder='templates', static_folder='static')
ascii_chars = set(string.printable)
ascii_chars.remove(' ')
ascii_chars.add('...')
def takeout_non_ascii(s):
return list(filter(lambda x: x not in ascii_chars, s))
@twitter_mod.route('/twitter', methods=['GET', 'POST'])
def twitter():
if request.method == 'POST':
try:
topic = request.form['topic'].strip()
if topic == '':
return render_template('projects/twitter.html', message='Please enter a valid topic')
text = []
for tweet in tweepy.Cursor(api.search, topic, lang='hi').items(50):
temp = ''.join(takeout_non_ascii(tweet.text))
if not len(temp) in range(3):
text.append(temp)
data, emotion_sents, score, line_sentiment, text, length = processing_results(text)
return render_template('projects/twitter.html', data=[data, emotion_sents, score, zip(text, line_sentiment), length])
except Exception:
return render_template('projects/twitter.html', message='Something went wrong. Please try again.')
else:
return render_template('projects/twitter.html')
|
d26b2fd19b048d3720d757ba850d88b683d4b367
|
st2common/st2common/runners/__init__.py
|
st2common/st2common/runners/__init__.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from stevedore.driver import DriverManager
from stevedore.extension import ExtensionManager
__all__ = [
'BACKENDS_NAMESPACE',
'get_available_backends',
'get_backend_instance'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
def get_available_backends():
"""
Return names of the available / installed action runners.
:rtype: ``list`` of ``str``
"""
manager = ExtensionManager(namespace=BACKENDS_NAMESPACE, invoke_on_load=False)
return manager.names()
def get_backend_instance(name):
"""
Return a class instance for the provided runner name.
"""
manager = DriverManager(namespace=BACKENDS_NAMESPACE, name=name, invoke_on_load=False)
return manager.driver
|
Add functions for retrieving a list of dynamically registered runners.
|
Add functions for retrieving a list of dynamically registered runners.
Those functions match same functions exposed by the auth backend loading
functionality.
|
Python
|
apache-2.0
|
StackStorm/st2,StackStorm/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,Plexxi/st2,Plexxi/st2,Plexxi/st2,nzlosh/st2,nzlosh/st2,nzlosh/st2
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
Add functions for retrieving a list of dynamically registered runners.
Those functions match same functions exposed by the auth backend loading
functionality.
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from stevedore.driver import DriverManager
from stevedore.extension import ExtensionManager
__all__ = [
'BACKENDS_NAMESPACE',
'get_available_backends',
'get_backend_instance'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
def get_available_backends():
"""
Return names of the available / installed action runners.
:rtype: ``list`` of ``str``
"""
manager = ExtensionManager(namespace=BACKENDS_NAMESPACE, invoke_on_load=False)
return manager.names()
def get_backend_instance(name):
"""
Return a class instance for the provided runner name.
"""
manager = DriverManager(namespace=BACKENDS_NAMESPACE, name=name, invoke_on_load=False)
return manager.driver
|
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
<commit_msg>Add functions for retrieving a list of dynamically registered runners.
Those functions match same functions exposed by the auth backend loading
functionality.<commit_after>
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from stevedore.driver import DriverManager
from stevedore.extension import ExtensionManager
__all__ = [
'BACKENDS_NAMESPACE',
'get_available_backends',
'get_backend_instance'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
def get_available_backends():
"""
Return names of the available / installed action runners.
:rtype: ``list`` of ``str``
"""
manager = ExtensionManager(namespace=BACKENDS_NAMESPACE, invoke_on_load=False)
return manager.names()
def get_backend_instance(name):
"""
Return a class instance for the provided runner name.
"""
manager = DriverManager(namespace=BACKENDS_NAMESPACE, name=name, invoke_on_load=False)
return manager.driver
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
Add functions for retrieving a list of dynamically registered runners.
Those functions match same functions exposed by the auth backend loading
functionality.# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from stevedore.driver import DriverManager
from stevedore.extension import ExtensionManager
__all__ = [
'BACKENDS_NAMESPACE',
'get_available_backends',
'get_backend_instance'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
def get_available_backends():
"""
Return names of the available / installed action runners.
:rtype: ``list`` of ``str``
"""
manager = ExtensionManager(namespace=BACKENDS_NAMESPACE, invoke_on_load=False)
return manager.names()
def get_backend_instance(name):
"""
Return a class instance for the provided runner name.
"""
manager = DriverManager(namespace=BACKENDS_NAMESPACE, name=name, invoke_on_load=False)
return manager.driver
|
<commit_before># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
'BACKENDS_NAMESPACE'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
<commit_msg>Add functions for retrieving a list of dynamically registered runners.
Those functions match same functions exposed by the auth backend loading
functionality.<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from stevedore.driver import DriverManager
from stevedore.extension import ExtensionManager
__all__ = [
'BACKENDS_NAMESPACE',
'get_available_backends',
'get_backend_instance'
]
BACKENDS_NAMESPACE = 'st2common.runners.runner'
def get_available_backends():
"""
Return names of the available / installed action runners.
:rtype: ``list`` of ``str``
"""
manager = ExtensionManager(namespace=BACKENDS_NAMESPACE, invoke_on_load=False)
return manager.names()
def get_backend_instance(name):
"""
Return a class instance for the provided runner name.
"""
manager = DriverManager(namespace=BACKENDS_NAMESPACE, name=name, invoke_on_load=False)
return manager.driver
|
1c35bf9c4831babcdaabd9feb291a757ad298657
|
src/dbbrankingparser/__init__.py
|
src/dbbrankingparser/__init__.py
|
"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION: str = '0.4-dev'
|
"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION = '0.4-dev'
|
Remove type annotation from `VERSION` as setuptools can't handle it
|
Remove type annotation from `VERSION` as setuptools can't handle it
|
Python
|
mit
|
homeworkprod/dbb-ranking-parser
|
"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION: str = '0.4-dev'
Remove type annotation from `VERSION` as setuptools can't handle it
|
"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION = '0.4-dev'
|
<commit_before>"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION: str = '0.4-dev'
<commit_msg>Remove type annotation from `VERSION` as setuptools can't handle it<commit_after>
|
"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION = '0.4-dev'
|
"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION: str = '0.4-dev'
Remove type annotation from `VERSION` as setuptools can't handle it"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION = '0.4-dev'
|
<commit_before>"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION: str = '0.4-dev'
<commit_msg>Remove type annotation from `VERSION` as setuptools can't handle it<commit_after>"""
DBB Ranking Parser
~~~~~~~~~~~~~~~~~~
Extract league rankings from the DBB (Deutscher Basketball Bund e.V.)
website.
The resulting data is structured as a list of dictionaries.
Please note that rankings are usually only available for the current
season, but not those of the past.
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: MIT, see LICENSE for details.
"""
from .main import load_ranking_for_league, load_ranking_from_url
VERSION = '0.4-dev'
|
b3ae8ed9c17ed9371a80d14d97062136da225a92
|
src/chicago_flow.py
|
src/chicago_flow.py
|
#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2016)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
|
#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2015)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
|
Revert year range end back to 2015 (2016 is not over)
|
Revert year range end back to 2015 (2016 is not over)
|
Python
|
unlicense
|
datascopeanalytics/chicago-new-business,datascopeanalytics/chicago-new-business
|
#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2016)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
Revert year range end back to 2015 (2016 is not over)
|
#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2015)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
|
<commit_before>#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2016)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
<commit_msg>Revert year range end back to 2015 (2016 is not over)<commit_after>
|
#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2015)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
|
#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2016)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
Revert year range end back to 2015 (2016 is not over)#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2015)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
|
<commit_before>#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2016)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
<commit_msg>Revert year range end back to 2015 (2016 is not over)<commit_after>#!/usr/bin/env python
import csv
import sys
import figs
def load_counts(filename):
counts = {}
with open(filename) as stream:
stream.readline()
reader = csv.reader(stream)
for row in reader:
year, count = map(int, row)
counts[year] = count
return counts
# read in the data
new_counts = load_counts(sys.argv[1])
old_counts = load_counts(sys.argv[2])
year_range = range(2004, 2015)
new_counts = [new_counts[year] for year in year_range]
old_counts = [-old_counts[year] for year in year_range]
fig = figs.FlowOverTime(year_range, new_counts, old_counts)
fig.save(sys.argv[-1])
|
b4d3fbb0535074f2153b2b9bad53fdf654ddedd1
|
src/python/borg/tools/bench_bellman.py
|
src/python/borg/tools/bench_bellman.py
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
# need a place to dump profiling results
from tempfile import NamedTemporaryFile
with NamedTemporaryFile() as named:
# profile the computation
from cProfile import runctx
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
context = {"c": compute_bellman_plan, "m": build_real_model()}
profile = runctx("c(m, 6, 1e6, 1.0)", {}, context, named.name)
# extract the results
from pstats import Stats
stats = Stats(named.name)
# display a report
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
model = build_real_model()
stats = call_profiled(lambda: compute_bellman_plan(model, 6, 1e6, 1.0))
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
|
Clean up the Bellman benchmarking code.
|
Clean up the Bellman benchmarking code.
|
Python
|
mit
|
borg-project/borg
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
# need a place to dump profiling results
from tempfile import NamedTemporaryFile
with NamedTemporaryFile() as named:
# profile the computation
from cProfile import runctx
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
context = {"c": compute_bellman_plan, "m": build_real_model()}
profile = runctx("c(m, 6, 1e6, 1.0)", {}, context, named.name)
# extract the results
from pstats import Stats
stats = Stats(named.name)
# display a report
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
Clean up the Bellman benchmarking code.
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
model = build_real_model()
stats = call_profiled(lambda: compute_bellman_plan(model, 6, 1e6, 1.0))
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
|
<commit_before>"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
# need a place to dump profiling results
from tempfile import NamedTemporaryFile
with NamedTemporaryFile() as named:
# profile the computation
from cProfile import runctx
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
context = {"c": compute_bellman_plan, "m": build_real_model()}
profile = runctx("c(m, 6, 1e6, 1.0)", {}, context, named.name)
# extract the results
from pstats import Stats
stats = Stats(named.name)
# display a report
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
<commit_msg>Clean up the Bellman benchmarking code.<commit_after>
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
model = build_real_model()
stats = call_profiled(lambda: compute_bellman_plan(model, 6, 1e6, 1.0))
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
|
"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
# need a place to dump profiling results
from tempfile import NamedTemporaryFile
with NamedTemporaryFile() as named:
# profile the computation
from cProfile import runctx
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
context = {"c": compute_bellman_plan, "m": build_real_model()}
profile = runctx("c(m, 6, 1e6, 1.0)", {}, context, named.name)
# extract the results
from pstats import Stats
stats = Stats(named.name)
# display a report
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
Clean up the Bellman benchmarking code."""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
model = build_real_model()
stats = call_profiled(lambda: compute_bellman_plan(model, 6, 1e6, 1.0))
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
|
<commit_before>"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
# need a place to dump profiling results
from tempfile import NamedTemporaryFile
with NamedTemporaryFile() as named:
# profile the computation
from cProfile import runctx
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
context = {"c": compute_bellman_plan, "m": build_real_model()}
profile = runctx("c(m, 6, 1e6, 1.0)", {}, context, named.name)
# extract the results
from pstats import Stats
stats = Stats(named.name)
# display a report
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
<commit_msg>Clean up the Bellman benchmarking code.<commit_after>"""
@author: Bryan Silverthorn <bcs@cargo-cult.org>
"""
if __name__ == "__main__":
from borg.tools.bench_bellman import main
raise SystemExit(main())
def main():
"""
Benchmark the Bellman plan computation code.
"""
from borg.portfolio.bellman import compute_bellman_plan
from borg.portfolio.test.test_bellman import build_real_model
model = build_real_model()
stats = call_profiled(lambda: compute_bellman_plan(model, 6, 1e6, 1.0))
stats.strip_dirs()
stats.sort_stats("cumulative")
stats.print_stats()
|
37eeecd3d4d1e6d2972565961b5c31731ae55ec7
|
tests/tester.py
|
tests/tester.py
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
def test_failing_test_reports_no_tests(self):
factory = ServerFactory().bind_script("test", "/dev/null")
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
Test the an empty document results in a test failure.
|
Test the an empty document results in a test failure.
|
Python
|
mit
|
bnkr/servequnit,bnkr/servequnit,bnkr/servequnit,bnkr/selenit,bnkr/selenit
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
Test the an empty document results in a test failure.
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
def test_failing_test_reports_no_tests(self):
factory = ServerFactory().bind_script("test", "/dev/null")
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
<commit_before>import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
<commit_msg>Test the an empty document results in a test failure.<commit_after>
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
def test_failing_test_reports_no_tests(self):
factory = ServerFactory().bind_script("test", "/dev/null")
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
Test the an empty document results in a test failure.import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
def test_failing_test_reports_no_tests(self):
factory = ServerFactory().bind_script("test", "/dev/null")
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
<commit_before>import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
passing = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
passing = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory()
factory.bind_script("test", passing)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
<commit_msg>Test the an empty document results in a test failure.<commit_after>import os
from unittest import TestCase
from servequnit.factory import ServerFactory
from servequnit.tester import QunitSeleniumTester, TestFailedError
class QunitSeleniumTesterTestCase(TestCase):
def _make_tester(self, server, suffix=None):
suffix = suffix or "oneshot/"
url = server.url + suffix
hub = "http://127.0.0.1:4444/wd/hub"
tester = QunitSeleniumTester(url=url, hub=hub)
return tester
def test_passing_test_passes(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "passes.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
tester.run()
def test_failing_test_reports_failure(self):
test_file = os.path.join(os.path.dirname(__file__), "data", "fails.js")
factory = ServerFactory().bind_script("test", test_file)
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
def test_failing_test_reports_no_tests(self):
factory = ServerFactory().bind_script("test", "/dev/null")
with factory.run() as server:
tester = self._make_tester(server)
self.assertRaises(TestFailedError, tester.run)
|
4027cccb929308528666e1232eeebfc1988e0ab1
|
tests/iam/test_iam_valid_json.py
|
tests/iam/test_iam_valid_json.py
|
"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def test_all_iam_templates():
"""Verify all IAM templates render as proper JSON."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_templates = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for template in iam_templates:
*_, service_json = template.split('/')
service, *_ = service_json.split('.')
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
|
"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def iam_templates():
"""Generate list of IAM templates."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_template_names = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for iam_template_name in iam_template_names:
yield iam_template_name
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
|
Use generator for IAM template names
|
test: Use generator for IAM template names
See also: #208
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def test_all_iam_templates():
"""Verify all IAM templates render as proper JSON."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_templates = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for template in iam_templates:
*_, service_json = template.split('/')
service, *_ = service_json.split('.')
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
test: Use generator for IAM template names
See also: #208
|
"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def iam_templates():
"""Generate list of IAM templates."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_template_names = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for iam_template_name in iam_template_names:
yield iam_template_name
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
|
<commit_before>"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def test_all_iam_templates():
"""Verify all IAM templates render as proper JSON."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_templates = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for template in iam_templates:
*_, service_json = template.split('/')
service, *_ = service_json.split('.')
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
<commit_msg>test: Use generator for IAM template names
See also: #208<commit_after>
|
"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def iam_templates():
"""Generate list of IAM templates."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_template_names = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for iam_template_name in iam_template_names:
yield iam_template_name
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
|
"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def test_all_iam_templates():
"""Verify all IAM templates render as proper JSON."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_templates = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for template in iam_templates:
*_, service_json = template.split('/')
service, *_ = service_json.split('.')
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
test: Use generator for IAM template names
See also: #208"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def iam_templates():
"""Generate list of IAM templates."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_template_names = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for iam_template_name in iam_template_names:
yield iam_template_name
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
|
<commit_before>"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def test_all_iam_templates():
"""Verify all IAM templates render as proper JSON."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_templates = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for template in iam_templates:
*_, service_json = template.split('/')
service, *_ = service_json.split('.')
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
<commit_msg>test: Use generator for IAM template names
See also: #208<commit_after>"""Test IAM Policy templates are valid JSON."""
import jinja2
from foremast.iam.construct_policy import render_policy_template
from foremast.utils.templates import LOCAL_TEMPLATES
def iam_templates():
"""Generate list of IAM templates."""
jinjaenv = jinja2.Environment(loader=jinja2.FileSystemLoader([LOCAL_TEMPLATES]))
iam_template_names = jinjaenv.list_templates(filter_func=lambda x: all([
x.startswith('infrastructure/iam/'),
'trust' not in x,
'wrapper' not in x, ]))
for iam_template_name in iam_template_names:
yield iam_template_name
items = ['resource1', 'resource2']
if service == 'rds-db':
items = {
'resource1': 'user1',
'resource2': 'user2',
}
rendered = render_policy_template(
account_number='',
app='coreforrest',
env='dev',
group='forrest',
items=items,
pipeline_settings={
'lambda': {
'vpc_enabled': False,
},
},
region='us-east-1',
service=service)
assert isinstance(rendered, list)
|
8f0caecc4accf8258e2cae664181680973e1add6
|
hftools/dataset/tests/test_helper.py
|
hftools/dataset/tests/test_helper.py
|
# -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertDictContainsSubset(dict(R="Ohm"),
helper._varname_unit_guess_db)
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
# -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertIn("R", helper._varname_unit_guess_db)
self.assertEqual("Ohm", helper._varname_unit_guess_db["R"])
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
Fix to remove DeprecationWarning message from test log
|
Fix to remove DeprecationWarning message from test log
|
Python
|
bsd-3-clause
|
hftools/hftools
|
# -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertDictContainsSubset(dict(R="Ohm"),
helper._varname_unit_guess_db)
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
Fix to remove DeprecationWarning message from test log
|
# -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertIn("R", helper._varname_unit_guess_db)
self.assertEqual("Ohm", helper._varname_unit_guess_db["R"])
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
<commit_before># -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertDictContainsSubset(dict(R="Ohm"),
helper._varname_unit_guess_db)
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
<commit_msg>Fix to remove DeprecationWarning message from test log<commit_after>
|
# -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertIn("R", helper._varname_unit_guess_db)
self.assertEqual("Ohm", helper._varname_unit_guess_db["R"])
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
# -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertDictContainsSubset(dict(R="Ohm"),
helper._varname_unit_guess_db)
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
Fix to remove DeprecationWarning message from test log# -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertIn("R", helper._varname_unit_guess_db)
self.assertEqual("Ohm", helper._varname_unit_guess_db["R"])
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
<commit_before># -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertDictContainsSubset(dict(R="Ohm"),
helper._varname_unit_guess_db)
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
<commit_msg>Fix to remove DeprecationWarning message from test log<commit_after># -*- coding: ISO-8859-1 -*-
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import pdb
import numpy as np
import hftools.dataset.helper as helper
from hftools.testing import TestCase, make_load_tests
from hftools.dataset import hfarray
#uncomment to enable doctests
#load_tests = make_load_tests(helper)
basepath = os.path.split(__file__)[0]
from hftools.constants import unit_to_multiplier
class Test_add_var_guess(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
helper.add_var_guess("R", "Ohm")
self.assertIn("R", helper._varname_unit_guess_db)
self.assertEqual("Ohm", helper._varname_unit_guess_db["R"])
class Test_guess_unit_from_varname(TestCase):
def setUp(self):
self.old = helper._varname_unit_guess_db.copy()
def tearDown(self):
helper._varname_unit_guess_db = self.old
def test_1(self):
unit = helper.guess_unit_from_varname("Vds")
self.assertEqual(unit, "V")
|
f5983348940e3acf937c7ddfded73f08d767c5a1
|
j1a/verilator/setup.py
|
j1a/verilator/setup.py
|
from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"],
extra_compile_args=['-O2'])
],
)
|
from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir",
"/usr/local/share/verilator/include/",
"/usr/share/verilator/include/",
"/usr/local/share/verilator/include/vltstd/",
"/usr/share/verilator/include/vltstd/"],
extra_compile_args=['-O2'])
],
)
|
Add vltstd to include path
|
Add vltstd to include path
|
Python
|
bsd-3-clause
|
jamesbowman/swapforth,zuloloxi/swapforth,uho/swapforth,uho/swapforth,zuloloxi/swapforth,GuzTech/swapforth,jamesbowman/swapforth,jamesbowman/swapforth,GuzTech/swapforth,uho/swapforth,RGD2/swapforth,zuloloxi/swapforth,RGD2/swapforth,GuzTech/swapforth,zuloloxi/swapforth,GuzTech/swapforth,uho/swapforth,RGD2/swapforth,jamesbowman/swapforth,RGD2/swapforth
|
from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"],
extra_compile_args=['-O2'])
],
)
Add vltstd to include path
|
from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir",
"/usr/local/share/verilator/include/",
"/usr/share/verilator/include/",
"/usr/local/share/verilator/include/vltstd/",
"/usr/share/verilator/include/vltstd/"],
extra_compile_args=['-O2'])
],
)
|
<commit_before>from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"],
extra_compile_args=['-O2'])
],
)
<commit_msg>Add vltstd to include path<commit_after>
|
from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir",
"/usr/local/share/verilator/include/",
"/usr/share/verilator/include/",
"/usr/local/share/verilator/include/vltstd/",
"/usr/share/verilator/include/vltstd/"],
extra_compile_args=['-O2'])
],
)
|
from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"],
extra_compile_args=['-O2'])
],
)
Add vltstd to include pathfrom distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir",
"/usr/local/share/verilator/include/",
"/usr/share/verilator/include/",
"/usr/local/share/verilator/include/vltstd/",
"/usr/share/verilator/include/vltstd/"],
extra_compile_args=['-O2'])
],
)
|
<commit_before>from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir", "/usr/local/share/verilator/include/", "/usr/share/verilator/include/"],
extra_compile_args=['-O2'])
],
)
<commit_msg>Add vltstd to include path<commit_after>from distutils.core import setup
from distutils.extension import Extension
from os import system
setup(name='vsimj1a',
ext_modules=[
Extension('vsimj1a',
['vsim.cpp'],
depends=["obj_dir/Vv3__ALL.a"],
extra_objects=["obj_dir/verilated.o", "obj_dir/Vj1a__ALL.a"],
include_dirs=["obj_dir",
"/usr/local/share/verilator/include/",
"/usr/share/verilator/include/",
"/usr/local/share/verilator/include/vltstd/",
"/usr/share/verilator/include/vltstd/"],
extra_compile_args=['-O2'])
],
)
|
0f7732d3ceb67ecd445bb4fe2fee1edf4ce8a2f4
|
rock/utils.py
|
rock/utils.py
|
from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(value):
return value.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
|
from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(text):
return text.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
|
Tweak raw text parameter name
|
Tweak raw text parameter name
|
Python
|
mit
|
silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock,silas/rock
|
from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(value):
return value.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
Tweak raw text parameter name
|
from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(text):
return text.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
|
<commit_before>from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(value):
return value.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
<commit_msg>Tweak raw text parameter name<commit_after>
|
from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(text):
return text.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
|
from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(value):
return value.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
Tweak raw text parameter namefrom __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(text):
return text.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
|
<commit_before>from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(value):
return value.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
<commit_msg>Tweak raw text parameter name<commit_after>from __future__ import unicode_literals
import os
try:
from io import StringIO
except ImportError: # pragma: no cover
from StringIO import StringIO
from rock.exceptions import ConfigError
ROCK_SHELL = (os.environ.get('ROCK_SHELL') or '/bin/bash -c').split()
ROCK_SHELL.insert(1, os.path.basename(ROCK_SHELL[0]))
def isexecutable(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
try:
basestring
def isstr(s):
return isinstance(s, basestring)
except NameError: # pragma: no cover
def isstr(s):
return isinstance(s, str)
def raw(text):
return text.replace('\\', '\\\\')
class Shell(object):
def __init__(self):
self.stdin = StringIO()
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.run()
def run(self):
if not isexecutable(ROCK_SHELL[0]):
raise ConfigError('invalid ROCK_SHELL: %s' % ROCK_SHELL)
os.execl(*(ROCK_SHELL + [self.stdin.getvalue()]))
def write(self, text):
self.stdin.write(text + '\n')
|
c7b684ebf85e2a80e0acdd44ea91171bc1aa6388
|
jarbas/chamber_of_deputies/fields.py
|
jarbas/chamber_of_deputies/fields.py
|
from datetime import date
from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%dT%H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = super(DateAsStringField, cls).deserialize(value)
if value: # useful when serializing it to Celery
return value.strftime(cls.OUTPUT_FORMAT)
|
from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%d %H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = value.replace('T', ' ') # normalize date/time separator
return super(DateAsStringField, cls).deserialize(value)
|
Make date imports more flexible
|
Make date imports more flexible
Now ut works with `YYYY-MM-DD HH:MM:SS` and with `YYYY-MM-DDTHH:MM:SS`.
|
Python
|
mit
|
datasciencebr/serenata-de-amor,datasciencebr/jarbas,marcusrehm/serenata-de-amor,datasciencebr/jarbas,datasciencebr/jarbas,marcusrehm/serenata-de-amor,datasciencebr/jarbas,marcusrehm/serenata-de-amor,datasciencebr/serenata-de-amor,marcusrehm/serenata-de-amor
|
from datetime import date
from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%dT%H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = super(DateAsStringField, cls).deserialize(value)
if value: # useful when serializing it to Celery
return value.strftime(cls.OUTPUT_FORMAT)
Make date imports more flexible
Now ut works with `YYYY-MM-DD HH:MM:SS` and with `YYYY-MM-DDTHH:MM:SS`.
|
from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%d %H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = value.replace('T', ' ') # normalize date/time separator
return super(DateAsStringField, cls).deserialize(value)
|
<commit_before>from datetime import date
from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%dT%H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = super(DateAsStringField, cls).deserialize(value)
if value: # useful when serializing it to Celery
return value.strftime(cls.OUTPUT_FORMAT)
<commit_msg>Make date imports more flexible
Now ut works with `YYYY-MM-DD HH:MM:SS` and with `YYYY-MM-DDTHH:MM:SS`.<commit_after>
|
from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%d %H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = value.replace('T', ' ') # normalize date/time separator
return super(DateAsStringField, cls).deserialize(value)
|
from datetime import date
from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%dT%H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = super(DateAsStringField, cls).deserialize(value)
if value: # useful when serializing it to Celery
return value.strftime(cls.OUTPUT_FORMAT)
Make date imports more flexible
Now ut works with `YYYY-MM-DD HH:MM:SS` and with `YYYY-MM-DDTHH:MM:SS`.from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%d %H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = value.replace('T', ' ') # normalize date/time separator
return super(DateAsStringField, cls).deserialize(value)
|
<commit_before>from datetime import date
from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%dT%H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = super(DateAsStringField, cls).deserialize(value)
if value: # useful when serializing it to Celery
return value.strftime(cls.OUTPUT_FORMAT)
<commit_msg>Make date imports more flexible
Now ut works with `YYYY-MM-DD HH:MM:SS` and with `YYYY-MM-DDTHH:MM:SS`.<commit_after>from rows import fields
class IntegerField(fields.IntegerField):
@classmethod
def deserialize(cls, value, *args, **kwargs):
try: # Rows cannot convert values such as '2011.0' to integer
value = int(float(value))
except:
pass
return super(IntegerField, cls).deserialize(value)
class DateAsStringField(fields.DateField):
INPUT_FORMAT = '%Y-%m-%d %H:%M:%S'
OUTPUT_FORMAT = '%Y-%m-%d'
@classmethod
def deserialize(cls, value, *args, **kwargs):
value = value.replace('T', ' ') # normalize date/time separator
return super(DateAsStringField, cls).deserialize(value)
|
158e11dc1c11e606621a729b3b220cecf5ca700a
|
awx/api/management/commands/uses_mongo.py
|
awx/api/management/commands/uses_mongo.py
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking # noqa
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
|
Set noqa to silence flake8.
|
Set noqa to silence flake8.
|
Python
|
apache-2.0
|
wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
Set noqa to silence flake8.
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking # noqa
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
|
<commit_before># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
<commit_msg>Set noqa to silence flake8.<commit_after>
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking # noqa
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
Set noqa to silence flake8.# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking # noqa
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
|
<commit_before># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
<commit_msg>Set noqa to silence flake8.<commit_after># Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved
import sys
from django.core.management.base import BaseCommand
from awx.main.task_engine import TaskSerializer
class Command(BaseCommand):
"""Return a exit status of 0 if MongoDB should be active, and an
exit status of 1 otherwise.
This script is intended to be used by bash and init scripts to
conditionally start MongoDB, so its focus is on being bash-friendly.
"""
def handle(self, **kwargs):
# Get the license data.
license_reader = TaskSerializer()
license_data = license_reader.from_file()
# Does the license contain the system tracking feature?
# If and only if it does, MongoDB should run.
system_tracking = license_data['features']['system_tracking']
# Okay, do we need MongoDB to be turned on?
# This is a silly variable assignment right now, but I expect the
# rules here will grow more complicated over time.
# FIXME: Most likely this should be False if HA is active
# (not just enabled by license, but actually in use).
uses_mongo = system_tracking # noqa
# If we do not need Mongo, return a non-zero exit status.
print('MongoDB NOT required')
sys.exit(1)
# We do need Mongo, return zero.
print('MongoDB required')
sys.exit(0)
|
7f79e575b9a2b5dc15ed304e2c1cb123ab39b91b
|
iscc_bench/metaid/shortnorm.py
|
iscc_bench/metaid/shortnorm.py
|
# -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
|
# -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
nfd_nfc = unicodedata.normalize('NFC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
|
Add NFD_NFC to unicode normalization comparison.
|
Add NFD_NFC to unicode normalization comparison.
|
Python
|
bsd-2-clause
|
coblo/isccbench
|
# -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
Add NFD_NFC to unicode normalization comparison.
|
# -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
nfd_nfc = unicodedata.normalize('NFC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
|
<commit_before># -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
<commit_msg>Add NFD_NFC to unicode normalization comparison.<commit_after>
|
# -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
nfd_nfc = unicodedata.normalize('NFC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
|
# -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
Add NFD_NFC to unicode normalization comparison.# -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
nfd_nfc = unicodedata.normalize('NFC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
|
<commit_before># -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
<commit_msg>Add NFD_NFC to unicode normalization comparison.<commit_after># -*- coding: utf-8 -*-
import unicodedata
def shortest_normalization_form():
"""
Find unicode normalization that generates shortest utf8 encoded text.
Result NFKC
"""
s = 'Iñtërnâtiônàlizætiøn☃ and string escaping are ticky   things'
nfc = unicodedata.normalize('NFC', s)
nfd = unicodedata.normalize('NFD', s)
nfkc = unicodedata.normalize('NFKC', s)
nfkd = unicodedata.normalize('NFKD', s)
nfd_nfkc = unicodedata.normalize('NFKC', nfd)
nfd_nfc = unicodedata.normalize('NFC', nfd)
print('UTF-8 length of normalized strings:\n')
print(f'NFC: {len(nfc.encode("utf8"))}')
print(f'NFD: {len(nfd.encode("utf8"))}')
print(f'NFKC: {len(nfkc.encode("utf8"))}')
print(f'NFKD: {len(nfkd.encode("utf8"))}')
print(f'NFD_NFKC: {len(nfd_nfkc.encode("utf8"))}')
print(f'NFD_NFC: {len(nfd_nfc.encode("utf8"))}')
if __name__ == '__main__':
shortest_normalization_form()
|
ab079e05cb0a242235c1f506cb710279bf233ba0
|
opps/channels/context_processors.py
|
opps/channels/context_processors.py
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True)
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True).order_by('order')
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
|
Fix order on opps menu
|
Fix order on opps menu
|
Python
|
mit
|
YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,williamroot/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True)
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
Fix order on opps menu
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True).order_by('order')
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
|
<commit_before># -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True)
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
<commit_msg>Fix order on opps menu<commit_after>
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True).order_by('order')
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True)
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
Fix order on opps menu# -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True).order_by('order')
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
|
<commit_before># -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True)
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
<commit_msg>Fix order on opps menu<commit_after># -*- coding: utf-8 -*-
from django.utils import timezone
from django.conf import settings
from django.contrib.sites.models import get_current_site
from .models import Channel
def channel_context(request):
""" Channel context processors
"""
site = get_current_site(request)
opps_menu = Channel.objects.filter(site=site,
date_available__lte=timezone.now(),
published=True,
show_in_menu=True).order_by('order')
return {'opps_menu': opps_menu,
'opps_channel_conf_all': settings.OPPS_CHANNEL_CONF}
|
e07d6d0db7dfed8013f6b1b058167aa16070fc35
|
messente/verigator/routes.py
|
messente/verigator/routes.py
|
URL = "https://api.dev.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
|
URL = "https://api.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
|
Update server endpoint from dev to production
|
Update server endpoint from dev to production
|
Python
|
apache-2.0
|
messente/verigator-python
|
URL = "https://api.dev.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
Update server endpoint from dev to production
|
URL = "https://api.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
|
<commit_before>URL = "https://api.dev.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
<commit_msg>Update server endpoint from dev to production<commit_after>
|
URL = "https://api.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
|
URL = "https://api.dev.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
Update server endpoint from dev to productionURL = "https://api.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
|
<commit_before>URL = "https://api.dev.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
<commit_msg>Update server endpoint from dev to production<commit_after>URL = "https://api.verigator.com"
CREATE_SERVICE = "v1/service/service"
GET_SERVICE = "v1/service/service/{}"
DELETE_SERVICE = "v1/service/service/{}"
GET_USERS = "v1/service/service/{}/users"
GET_USER = "v1/service/service/{}/users/{}"
CREATE_USER = "v1/service/service/{}/users"
DELETE_USER = "v1/service/service/{}/users/{}"
AUTH_INITIATE = "v1/service/service/{}/users/{}/auth"
AUTH_VERIFY = "v1/service/service/{}/users/{}/auth"
|
6281da3b846bfea26ea68e3fe480c738a5181506
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import optparse
import sys
import unittest
from walrus import tests
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
options, args = parser.parse_args()
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
#!/usr/bin/env python
import optparse
import os
import sys
import unittest
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
parser.add_option('-z', '--zpop', action='store_true',
help='Run ZPOP* tests.')
options, args = parser.parse_args()
if options.zpop:
os.environ['TEST_ZPOP'] = '1'
from walrus import tests
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
Add test-runner option to run zpop* tests.
|
Add test-runner option to run zpop* tests.
|
Python
|
mit
|
coleifer/walrus
|
#!/usr/bin/env python
import optparse
import sys
import unittest
from walrus import tests
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
options, args = parser.parse_args()
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
Add test-runner option to run zpop* tests.
|
#!/usr/bin/env python
import optparse
import os
import sys
import unittest
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
parser.add_option('-z', '--zpop', action='store_true',
help='Run ZPOP* tests.')
options, args = parser.parse_args()
if options.zpop:
os.environ['TEST_ZPOP'] = '1'
from walrus import tests
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
<commit_before>#!/usr/bin/env python
import optparse
import sys
import unittest
from walrus import tests
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
options, args = parser.parse_args()
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
<commit_msg>Add test-runner option to run zpop* tests.<commit_after>
|
#!/usr/bin/env python
import optparse
import os
import sys
import unittest
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
parser.add_option('-z', '--zpop', action='store_true',
help='Run ZPOP* tests.')
options, args = parser.parse_args()
if options.zpop:
os.environ['TEST_ZPOP'] = '1'
from walrus import tests
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
#!/usr/bin/env python
import optparse
import sys
import unittest
from walrus import tests
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
options, args = parser.parse_args()
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
Add test-runner option to run zpop* tests.#!/usr/bin/env python
import optparse
import os
import sys
import unittest
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
parser.add_option('-z', '--zpop', action='store_true',
help='Run ZPOP* tests.')
options, args = parser.parse_args()
if options.zpop:
os.environ['TEST_ZPOP'] = '1'
from walrus import tests
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
<commit_before>#!/usr/bin/env python
import optparse
import sys
import unittest
from walrus import tests
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
options, args = parser.parse_args()
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
<commit_msg>Add test-runner option to run zpop* tests.<commit_after>#!/usr/bin/env python
import optparse
import os
import sys
import unittest
def runtests(verbose=False, failfast=False, names=None):
if names:
suite = unittest.TestLoader().loadTestsFromNames(names, tests)
else:
suite = unittest.TestLoader().loadTestsFromModule(tests)
runner = unittest.TextTestRunner(verbosity=2 if verbose else 1,
failfast=failfast)
return runner.run(suite)
if __name__ == '__main__':
try:
from redis import Redis
except ImportError:
raise RuntimeError('redis-py must be installed.')
else:
try:
Redis().info()
except:
raise RuntimeError('redis server does not appear to be running')
parser = optparse.OptionParser()
parser.add_option('-v', '--verbose', action='store_true', default=False,
dest='verbose', help='Verbose output.')
parser.add_option('-f', '--failfast', action='store_true', default=False,
help='Stop on first failure or error.')
parser.add_option('-z', '--zpop', action='store_true',
help='Run ZPOP* tests.')
options, args = parser.parse_args()
if options.zpop:
os.environ['TEST_ZPOP'] = '1'
from walrus import tests
result = runtests(
verbose=options.verbose,
failfast=options.failfast,
names=args)
if result.failures:
sys.exit(1)
elif result.errors:
sys.exit(2)
|
cf0110f2b1adc8fbf4b8305841961d67da33f8c7
|
pybo/bayesopt/policies/thompson.py
|
pybo/bayesopt/policies/thompson.py
|
"""
Acquisition functions based on (GP) UCB.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# use this to simplify (slightly) the Thompson implementation with sampled
# models.
from collections import deque
# local imports
from ..utils import params
# exported symbols
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n).get
|
"""
Implementation of Thompson sampling for continuous spaces.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from ..utils import params
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100, rng=None):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n, rng).get
|
Fix Thompson to pay attention to the RNG.
|
Fix Thompson to pay attention to the RNG.
|
Python
|
bsd-2-clause
|
mwhoffman/pybo,jhartford/pybo
|
"""
Acquisition functions based on (GP) UCB.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# use this to simplify (slightly) the Thompson implementation with sampled
# models.
from collections import deque
# local imports
from ..utils import params
# exported symbols
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n).get
Fix Thompson to pay attention to the RNG.
|
"""
Implementation of Thompson sampling for continuous spaces.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from ..utils import params
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100, rng=None):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n, rng).get
|
<commit_before>"""
Acquisition functions based on (GP) UCB.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# use this to simplify (slightly) the Thompson implementation with sampled
# models.
from collections import deque
# local imports
from ..utils import params
# exported symbols
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n).get
<commit_msg>Fix Thompson to pay attention to the RNG.<commit_after>
|
"""
Implementation of Thompson sampling for continuous spaces.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from ..utils import params
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100, rng=None):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n, rng).get
|
"""
Acquisition functions based on (GP) UCB.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# use this to simplify (slightly) the Thompson implementation with sampled
# models.
from collections import deque
# local imports
from ..utils import params
# exported symbols
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n).get
Fix Thompson to pay attention to the RNG."""
Implementation of Thompson sampling for continuous spaces.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from ..utils import params
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100, rng=None):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n, rng).get
|
<commit_before>"""
Acquisition functions based on (GP) UCB.
"""
# future imports
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
# use this to simplify (slightly) the Thompson implementation with sampled
# models.
from collections import deque
# local imports
from ..utils import params
# exported symbols
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n).get
<commit_msg>Fix Thompson to pay attention to the RNG.<commit_after>"""
Implementation of Thompson sampling for continuous spaces.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from collections import deque
from ..utils import params
__all__ = ['Thompson']
@params('n')
def Thompson(model, n=100, rng=None):
"""
Implementation of Thompson sampling for continuous models using a finite
approximation to the kernel matrix with `n` Fourier components.
"""
if hasattr(model, '__iter__'):
model = deque(model, maxlen=1).pop()
return model.sample_fourier(n, rng).get
|
c96a2f636b48b065e8404af6d67fbae5986fd34a
|
tests/basics/subclass_native2_tuple.py
|
tests/basics/subclass_native2_tuple.py
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
a = tuple([1,2,3])
b = Ctuple1([1,2,3])
c = Ctuple2([1,2,3])
print(a == b)
print(b == c)
print(c == a)
|
Expand test cases for equality of subclasses.
|
tests/basics: Expand test cases for equality of subclasses.
|
Python
|
mit
|
pramasoul/micropython,adafruit/circuitpython,henriknelson/micropython,MrSurly/micropython,bvernoux/micropython,tobbad/micropython,kerneltask/micropython,kerneltask/micropython,tobbad/micropython,tobbad/micropython,pramasoul/micropython,selste/micropython,adafruit/circuitpython,henriknelson/micropython,pozetroninc/micropython,pozetroninc/micropython,MrSurly/micropython,pozetroninc/micropython,adafruit/circuitpython,kerneltask/micropython,tobbad/micropython,pramasoul/micropython,selste/micropython,bvernoux/micropython,MrSurly/micropython,adafruit/circuitpython,bvernoux/micropython,selste/micropython,henriknelson/micropython,pozetroninc/micropython,kerneltask/micropython,henriknelson/micropython,tobbad/micropython,kerneltask/micropython,pozetroninc/micropython,selste/micropython,pramasoul/micropython,MrSurly/micropython,pramasoul/micropython,MrSurly/micropython,henriknelson/micropython,bvernoux/micropython,adafruit/circuitpython,selste/micropython,bvernoux/micropython,adafruit/circuitpython
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
tests/basics: Expand test cases for equality of subclasses.
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
a = tuple([1,2,3])
b = Ctuple1([1,2,3])
c = Ctuple2([1,2,3])
print(a == b)
print(b == c)
print(c == a)
|
<commit_before>class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
<commit_msg>tests/basics: Expand test cases for equality of subclasses.<commit_after>
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
a = tuple([1,2,3])
b = Ctuple1([1,2,3])
c = Ctuple2([1,2,3])
print(a == b)
print(b == c)
print(c == a)
|
class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
tests/basics: Expand test cases for equality of subclasses.class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
a = tuple([1,2,3])
b = Ctuple1([1,2,3])
c = Ctuple2([1,2,3])
print(a == b)
print(b == c)
print(c == a)
|
<commit_before>class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
<commit_msg>tests/basics: Expand test cases for equality of subclasses.<commit_after>class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Ctuple1(Base1, tuple):
pass
a = Ctuple1()
print(len(a))
a = Ctuple1([1, 2, 3])
print(len(a))
print("---")
class Ctuple2(tuple, Base1):
pass
a = Ctuple2()
print(len(a))
a = Ctuple2([1, 2, 3])
print(len(a))
a = tuple([1,2,3])
b = Ctuple1([1,2,3])
c = Ctuple2([1,2,3])
print(a == b)
print(b == c)
print(c == a)
|
4d3a0dc3b3b8a11a066f52bc78b1160e194ad64f
|
wmtexe/cmd/script.py
|
wmtexe/cmd/script.py
|
"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid)
if args.run:
launcher.run()
else:
print(launcher.script())
|
"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--server-url', default='',
help='WMT API server URL')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid,
server_url=args.server_url)
if args.run:
launcher.run()
else:
print(launcher.script())
|
Add 'server-url' command line argument
|
Add 'server-url' command line argument
Its value is passed to the server_url parameter of the Launcher class.
|
Python
|
mit
|
csdms/wmt-exe,csdms/wmt-exe,csdms/wmt-exe,csdms/wmt-exe
|
"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid)
if args.run:
launcher.run()
else:
print(launcher.script())
Add 'server-url' command line argument
Its value is passed to the server_url parameter of the Launcher class.
|
"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--server-url', default='',
help='WMT API server URL')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid,
server_url=args.server_url)
if args.run:
launcher.run()
else:
print(launcher.script())
|
<commit_before>"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid)
if args.run:
launcher.run()
else:
print(launcher.script())
<commit_msg>Add 'server-url' command line argument
Its value is passed to the server_url parameter of the Launcher class.<commit_after>
|
"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--server-url', default='',
help='WMT API server URL')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid,
server_url=args.server_url)
if args.run:
launcher.run()
else:
print(launcher.script())
|
"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid)
if args.run:
launcher.run()
else:
print(launcher.script())
Add 'server-url' command line argument
Its value is passed to the server_url parameter of the Launcher class."""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--server-url', default='',
help='WMT API server URL')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid,
server_url=args.server_url)
if args.run:
launcher.run()
else:
print(launcher.script())
|
<commit_before>"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid)
if args.run:
launcher.run()
else:
print(launcher.script())
<commit_msg>Add 'server-url' command line argument
Its value is passed to the server_url parameter of the Launcher class.<commit_after>"""Launch a WMT simulation using `bash` or `qsub`."""
from __future__ import print_function
import sys
import os
from ..launcher import BashLauncher, QsubLauncher
_LAUNCHERS = {
'bash': BashLauncher,
'qsub': QsubLauncher,
}
def main():
import argparse
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('uuid', type=str,
help='Unique identifier for simulation')
parser.add_argument('--extra-args', default='',
help='Extra arguments for wmt-slave command')
parser.add_argument('--server-url', default='',
help='WMT API server URL')
parser.add_argument('--launcher', choices=_LAUNCHERS.keys(),
default='bash', help='Launch method')
parser.add_argument('--run', action='store_true',
help='Launch simulation')
args = parser.parse_args()
launcher = _LAUNCHERS[args.launcher](args.uuid,
server_url=args.server_url)
if args.run:
launcher.run()
else:
print(launcher.script())
|
f5408c02202a07a1b45019eefb505eb8a0d21852
|
swagger2markdown.py
|
swagger2markdown.py
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except FileNotFoundError:
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
import argparse, json, os.path
import jinja2, requests
def main():
    """Convert a Swagger JSON definition (local path or URL) to Markdown.

    Loads the Swagger data, renders it through a Jinja2 template, and
    writes the result to the output file.  All file handles are managed
    with ``with`` so they are closed even when parsing or rendering fails.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "-i", "--input",
        default="swagger.json",
        help="path to or URL of the Swagger JSON file (default: swagger.json)",
        metavar="SWAGGER_LOCATION"
    )
    parser.add_argument(
        "-o", "--output",
        default="swagger.md",
        help="path to the output Markdown file (default: swagger.md)",
        metavar="OUTPUT"
    )
    parser.add_argument(
        "-t", "--template",
        default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
        help="Jinja2 template used for conversion",
        metavar="TEMPLATE"
    )
    args = parser.parse_args()
    try:
        with open(args.input, encoding="utf8") as swagger_file:
            swagger_data = json.load(swagger_file)
    except OSError:
        # FileNotFoundError is a subclass of OSError, so one handler covers
        # both a missing file and an invalid local path (e.g. a URL was
        # given); fall back to fetching the definition over HTTP.
        swagger_data = requests.get(args.input).json()
    with open(args.template, encoding="utf8") as template_file:
        template = jinja2.Template(template_file.read())
    with open(args.output, "w", encoding="utf8") as output:
        output.write(template.render(swagger_data=swagger_data))
|
Fix crash when URL is provided.
|
Fix crash when URL is provided.
|
Python
|
mit
|
moigagoo/swagger2markdown
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except FileNotFoundError:
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
Fix crash when URL is provided.
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except (FileNotFoundError, OSError):
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
<commit_before>import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except FileNotFoundError:
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
<commit_msg>Fix crash when URL is provided.<commit_after>
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except (FileNotFoundError, OSError):
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except FileNotFoundError:
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
Fix crash when URL is provided.import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except (FileNotFoundError, OSError):
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
<commit_before>import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except FileNotFoundError:
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
<commit_msg>Fix crash when URL is provided.<commit_after>import argparse, json, os.path
import jinja2, requests
def main():
parser = argparse.ArgumentParser()
parser.add_argument(
"-i", "--input",
default="swagger.json",
help="path to or URL of the Swagger JSON file (default: swagger.json)",
metavar="SWAGGER_LOCATION"
)
parser.add_argument(
"-o", "--output",
default="swagger.md",
help="path to the output Markdown file (default: swagger.md)",
metavar="OUTPUT"
)
parser.add_argument(
"-t", "--template",
default=os.path.join(os.path.dirname(__file__), "swagger.md.j2"),
help="Jinja2 template used for conversion",
metavar="TEMPLATE"
)
args = parser.parse_args()
try:
swagger_data = json.load(open(args.input, encoding="utf8"))
except (FileNotFoundError, OSError):
swagger_data = requests.get(args.input).json()
template = jinja2.Template(open(args.template, encoding="utf8").read())
with open(args.output, "w", encoding="utf8") as output:
output.write(template.render(swagger_data=swagger_data))
|
75ae022a615d51850e5c8766b1a300207489559d
|
django_jinja/__init__.py
|
django_jinja/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = (0, 2, 0, 'final', 0)
|
# -*- coding: utf-8 -*-
__version__ = (0, 3, 0, 'final', 0)
|
Increment version number to 0.3
|
Increment version number to 0.3
|
Python
|
bsd-3-clause
|
akx/django-jinja,glogiotatidis/django-jinja,niwinz/django-jinja,akx/django-jinja,glogiotatidis/django-jinja,akx/django-jinja,glogiotatidis/django-jinja,niwinz/django-jinja,niwinz/django-jinja,glogiotatidis/django-jinja,akx/django-jinja
|
# -*- coding: utf-8 -*-
__version__ = (0, 2, 0, 'final', 0)
Increment version number to 0.3
|
# -*- coding: utf-8 -*-
__version__ = (0, 3, 0, 'final', 0)
|
<commit_before># -*- coding: utf-8 -*-
__version__ = (0, 2, 0, 'final', 0)
<commit_msg>Increment version number to 0.3<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = (0, 3, 0, 'final', 0)
|
# -*- coding: utf-8 -*-
__version__ = (0, 2, 0, 'final', 0)
Increment version number to 0.3# -*- coding: utf-8 -*-
__version__ = (0, 3, 0, 'final', 0)
|
<commit_before># -*- coding: utf-8 -*-
__version__ = (0, 2, 0, 'final', 0)
<commit_msg>Increment version number to 0.3<commit_after># -*- coding: utf-8 -*-
__version__ = (0, 3, 0, 'final', 0)
|
69595a9617ce83e04c5de5f4d8cd6185765f3697
|
django_jobvite/models.py
|
django_jobvite/models.py
|
from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=35)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
|
from django.db import models
class Position(models.Model):
    """A job posting imported from Jobvite."""
    # Jobvite's unique identifier for the posting.
    job_id = models.CharField(max_length=25, unique=True)
    title = models.CharField(max_length=100)
    requisition_id = models.PositiveIntegerField()
    category = models.CharField(max_length=50)
    job_type = models.CharField(max_length=10)
    location = models.CharField(max_length=150)
    date = models.CharField(max_length=100)
    detail_url = models.URLField()
    apply_url = models.URLField()
    description = models.TextField()
    brief_description = models.TextField(null=True, blank=True)
    def __unicode__(self):
        return u"%s - %s" % (self.job_id, self.title)
    @models.permalink
    def get_absolute_url(self):
        url_kwargs = {'job_id': self.job_id}
        return ('django_jobvite_position', (), url_kwargs)
    def to_dict(self):
        """Return the model as a dictionary keyed on ``job_id``."""
        # Collect every concrete field except the primary key and the
        # job_id itself, which becomes the outer dictionary's key.
        values = {
            field.name: getattr(self, field.name)
            for field in self._meta.fields
            if not (field.primary_key or field.name == 'job_id')
        }
        return {self.job_id: values}
|
Increase size of category field.
|
Increase size of category field.
|
Python
|
bsd-3-clause
|
mozilla/django-jobvite
|
from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=35)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
Increase size of category field.
|
from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=50)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
|
<commit_before>from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=35)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
<commit_msg>Increase size of category field.<commit_after>
|
from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=50)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
|
from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=35)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
Increase size of category field.from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=50)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
|
<commit_before>from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=35)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
<commit_msg>Increase size of category field.<commit_after>from django.db import models
class Position(models.Model):
job_id = models.CharField(max_length=25, unique=True)
title = models.CharField(max_length=100)
requisition_id = models.PositiveIntegerField()
category = models.CharField(max_length=50)
job_type = models.CharField(max_length=10)
location = models.CharField(max_length=150)
date = models.CharField(max_length=100)
detail_url = models.URLField()
apply_url = models.URLField()
description = models.TextField()
brief_description = models.TextField(null=True, blank=True)
def __unicode__(self):
return u"%s - %s" % (self.job_id, self.title)
@models.permalink
def get_absolute_url(self):
return ('django_jobvite_position', (), {
'job_id': self.job_id,
})
def to_dict(self):
"""Return the model as a dictionary keyed on ``job_id``."""
fields = self._meta.fields
position_dict = {self.job_id: {}}
for field in fields:
if field.primary_key:
continue
if field.name == 'job_id':
continue
position_dict[self.job_id][field.name] = getattr(self, field.name)
return position_dict
|
2715c9accc8e8abaad72cd9afcec914dda0c6b46
|
pokediadb/utils.py
|
pokediadb/utils.py
|
import sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
raise
else:
low = guess
cur.close()
db.close()
return low
|
import sqlite3
def max_sql_variables():
    """Get the maximum number of arguments allowed in a query by the current
    sqlite3 implementation.

    Binary-searches the largest parameterized INSERT that an in-memory
    database will accept.  If the probe fails with an unrecognized
    OperationalError (as seen on some CI builds), SQLite's historic
    default limit of 999 is returned instead.

    Returns:
        int: SQLITE_MAX_VARIABLE_NUMBER
    """
    db = sqlite3.connect(':memory:')
    try:
        cur = db.cursor()
        try:
            cur.execute('CREATE TABLE t (test)')
            low, high = 0, 100000
            while (high - 1) > low:
                guess = (high + low) // 2
                query = 'INSERT INTO t VALUES ' + ','.join(
                    '(?)' for _ in range(guess))
                args = [str(i) for i in range(guess)]
                try:
                    cur.execute(query, args)
                except sqlite3.OperationalError as e:
                    if "too many SQL variables" in str(e):
                        high = guess
                    else:
                        # Unexpected failure mode: fall back to the
                        # historic default rather than crashing.
                        return 999
                else:
                    low = guess
            return low
        finally:
            # Bug fix: the original leaked the cursor and connection when
            # returning 999 early; try/finally closes them on every path.
            cur.close()
    finally:
        db.close()
|
Fix travis error with SQLITE_LIMIT_VARIABLE_NUMBER
|
Fix travis error with SQLITE_LIMIT_VARIABLE_NUMBER
|
Python
|
mit
|
Kynarth/pokediadb
|
import sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
raise
else:
low = guess
cur.close()
db.close()
return low
Fix travis error with SQLITE_LIMIT_VARIABLE_NUMBER
|
import sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
return 999
else:
low = guess
cur.close()
db.close()
return low
|
<commit_before>import sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
raise
else:
low = guess
cur.close()
db.close()
return low
<commit_msg>Fix travis error with SQLITE_LIMIT_VARIABLE_NUMBER<commit_after>
|
import sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
return 999
else:
low = guess
cur.close()
db.close()
return low
|
import sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
raise
else:
low = guess
cur.close()
db.close()
return low
Fix travis error with SQLITE_LIMIT_VARIABLE_NUMBERimport sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
return 999
else:
low = guess
cur.close()
db.close()
return low
|
<commit_before>import sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
raise
else:
low = guess
cur.close()
db.close()
return low
<commit_msg>Fix travis error with SQLITE_LIMIT_VARIABLE_NUMBER<commit_after>import sqlite3
def max_sql_variables():
"""Get the maximum number of arguments allowed in a query by the current
sqlite3 implementation.
Returns:
int: SQLITE_MAX_VARIABLE_NUMBER
"""
db = sqlite3.connect(':memory:')
cur = db.cursor()
cur.execute('CREATE TABLE t (test)')
low, high = 0, 100000
while (high - 1) > low:
guess = (high + low) // 2
query = 'INSERT INTO t VALUES ' + ','.join(['(?)' for _ in
range(guess)])
args = [str(i) for i in range(guess)]
try:
cur.execute(query, args)
except sqlite3.OperationalError as e:
if "too many SQL variables" in str(e):
high = guess
else:
return 999
else:
low = guess
cur.close()
db.close()
return low
|
da0478a48329ac79092a4603f4026434af26f032
|
scanblog/scanning/management/commands/fixuploadperms.py
|
scanblog/scanning/management/commands/fixuploadperms.py
|
import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} \\;' % dirname)
|
import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
    # Management command invoked as `./manage.py fixuploadperms`.
    args = ''
    help = "Set all permissions in the uploads directory for deploy."
    def handle(self, *args, **kwargs):
        """Normalize ownership and permissions of every media directory.

        Makes files group-readable/writable (u=rwX,g=rwX,o=rX), reassigns
        ownership to user www-data / group btb, and sets the setgid bit on
        directories so newly created files inherit the group.

        NOTE(review): shells out via ``os.system`` with ``sudo`` -- this
        presumably relies on passwordless sudo for these commands; paths
        are double-quoted but not shell-escaped, so a directory name
        containing '"' would break the command.  Confirm both assumptions
        against the deploy environment.
        """
        for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
                        os.path.join(settings.MEDIA_ROOT, "letters"),
                        os.path.join(settings.MEDIA_ROOT, "mailings"),
                        os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
                        settings.PUBLIC_MEDIA_ROOT):
            print dirname
            # files: -rw-rw-r--
            os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
            os.system('sudo chown -R www-data.btb "%s"' % dirname)
            # directories: -rwxrwsr-x
            # `-exec ... +` batches many directories into one chmod call.
            os.system('sudo find "%s" -type d -exec sudo chmod g+s {} +' % dirname)
|
Use multi arg option too
|
Use multi arg option too
|
Python
|
agpl-3.0
|
yourcelf/btb,yourcelf/btb,yourcelf/btb,yourcelf/btb,yourcelf/btb
|
import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} \\;' % dirname)
Use multi arg option too
|
import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} +' % dirname)
|
<commit_before>import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} \\;' % dirname)
<commit_msg>Use multi arg option too<commit_after>
|
import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} +' % dirname)
|
import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} \\;' % dirname)
Use multi arg option tooimport os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} +' % dirname)
|
<commit_before>import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} \\;' % dirname)
<commit_msg>Use multi arg option too<commit_after>import os
from django.core.management.base import BaseCommand
from django.conf import settings
class Command(BaseCommand):
args = ''
help = "Set all permissions in the uploads directory for deploy."
def handle(self, *args, **kwargs):
for dirname in (os.path.join(settings.MEDIA_ROOT, settings.UPLOAD_TO),
os.path.join(settings.MEDIA_ROOT, "letters"),
os.path.join(settings.MEDIA_ROOT, "mailings"),
os.path.join(settings.MEDIA_ROOT, "page_picker_thumbs"),
settings.PUBLIC_MEDIA_ROOT):
print dirname
# files: -rw-rw-r--
os.system('sudo chmod -R u=rwX,g=rwX,o=rX "%s"' % dirname)
os.system('sudo chown -R www-data.btb "%s"' % dirname)
# directories: -rwxrwsr-x
os.system('sudo find "%s" -type d -exec sudo chmod g+s {} +' % dirname)
|
5a653d0a6f0c97109254b043e9697675b5863218
|
pyquil/__init__.py
|
pyquil/__init__.py
|
__version__ = "2.0.0b1"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
__version__ = "2.0.0b2.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
Set version back to dev
|
Set version back to dev
|
Python
|
apache-2.0
|
rigetticomputing/pyquil
|
__version__ = "2.0.0b1"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
Set version back to dev
|
__version__ = "2.0.0b2.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
<commit_before>__version__ = "2.0.0b1"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
<commit_msg>Set version back to dev<commit_after>
|
__version__ = "2.0.0b2.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
__version__ = "2.0.0b1"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
Set version back to dev__version__ = "2.0.0b2.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
<commit_before>__version__ = "2.0.0b1"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
<commit_msg>Set version back to dev<commit_after>__version__ = "2.0.0b2.dev0"
from pyquil.quil import Program
from pyquil.api import list_quantum_computers, get_qc
|
42326a18132381f0488b587329fe6b9aaea47c87
|
normandy/selfrepair/views.py
|
normandy/selfrepair/views.py
|
from django.shortcuts import render
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
|
from django.conf import settings
from django.shortcuts import render
from django.views.decorators.cache import cache_control
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
@cache_control(public=True, max_age=settings.API_CACHE_TIME)
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
|
Add cache headers to self-repair page
|
Add cache headers to self-repair page
|
Python
|
mpl-2.0
|
Osmose/normandy,Osmose/normandy,mozilla/normandy,Osmose/normandy,mozilla/normandy,Osmose/normandy,mozilla/normandy,mozilla/normandy
|
from django.shortcuts import render
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
Add cache headers to self-repair page
|
from django.conf import settings
from django.shortcuts import render
from django.views.decorators.cache import cache_control
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
@cache_control(public=True, max_age=settings.API_CACHE_TIME)
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
|
<commit_before>from django.shortcuts import render
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
<commit_msg>Add cache headers to self-repair page<commit_after>
|
from django.conf import settings
from django.shortcuts import render
from django.views.decorators.cache import cache_control
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
@cache_control(public=True, max_age=settings.API_CACHE_TIME)
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
|
from django.shortcuts import render
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
Add cache headers to self-repair pagefrom django.conf import settings
from django.shortcuts import render
from django.views.decorators.cache import cache_control
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
@cache_control(public=True, max_age=settings.API_CACHE_TIME)
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
|
<commit_before>from django.shortcuts import render
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
<commit_msg>Add cache headers to self-repair page<commit_after>from django.conf import settings
from django.shortcuts import render
from django.views.decorators.cache import cache_control
from normandy.base.decorators import short_circuit_middlewares
@short_circuit_middlewares
@cache_control(public=True, max_age=settings.API_CACHE_TIME)
def repair(request, locale):
return render(request, 'selfrepair/repair.html', {
'locale': locale,
})
|
9d541eeebe789d61d915dbbc7fd5792e244bd93f
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list( 'DataSource', AdminControl.getCell())
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
Create script to save documentation to a file
|
4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list( 'DataSource', AdminControl.getCell())
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list( 'DataSource', AdminControl.getCell())
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list( 'DataSource', AdminControl.getCell())
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )<commit_msg>4: Create script to save documentation to a file
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/4<commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
dbs = AdminConfig.list( 'DataSource', AdminControl.getCell())
for db in dbs:
t1 = ibmcnx.functions.getDSId( db )
AdminConfig.list( t1 )
|
8fa72cea635171e94f0fb5538bc82197c6890b36
|
tests/issues/test_issue0619.py
|
tests/issues/test_issue0619.py
|
# -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error(self):
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
|
# -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error():
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
|
FIX problem after reorganizing test.
|
FIX problem after reorganizing test.
|
Python
|
bsd-2-clause
|
jenisys/behave,jenisys/behave
|
# -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error(self):
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
FIX problem after reorganizing test.
|
# -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error():
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
|
<commit_before># -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error(self):
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
<commit_msg>FIX problem after reorganizing test.<commit_after>
|
# -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error():
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
|
# -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error(self):
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
FIX problem after reorganizing test.# -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error():
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
|
<commit_before># -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error(self):
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
<commit_msg>FIX problem after reorganizing test.<commit_after># -*- coding: UTF-8 -*-
"""
https://github.com/behave/behave/issues/619
When trying to do something like::
foo = getattr(context, '_foo', 'bar')
Behave fails with::
File "[...]/behave/runner.py", line 208, in __getattr__
return self.__dict__[attr]
KeyError: '_foo'
I think this is because the __getattr__ method in Context (here) don't raise
properly AttributeError when the key don't exists in the dict,
so the default behaviour of getattr is not executed (see docs).
"""
from __future__ import absolute_import
from behave.runner import Context, scoped_context_layer
from mock import Mock
def test_issue__getattr_with_protected_unknown_context_attribute_raises_no_error():
context = Context(runner=Mock())
with scoped_context_layer(context): # CALLS-HERE: context._push()
value = getattr(context, "_UNKNOWN_ATTRIB", "__UNKNOWN__")
assert value == "__UNKNOWN__"
# -- ENSURED: No exception is raised, neither KeyError nor AttributeError
|
0a522863dce6e42bf66c66a56078c00901c64f52
|
redash/__init__.py
|
redash/__init__.py
|
import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=0, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers
|
import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
if redis_url.path:
redis_db = redis_url.path[1]
else:
redis_db = 0
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers
|
Use database number from redis url if available.
|
Use database number from redis url if available.
|
Python
|
bsd-2-clause
|
denisov-vlad/redash,EverlyWell/redash,44px/redash,rockwotj/redash,M32Media/redash,guaguadev/redash,stefanseifert/redash,hudl/redash,easytaxibr/redash,denisov-vlad/redash,ninneko/redash,jmvasquez/redashtest,crowdworks/redash,easytaxibr/redash,getredash/redash,pubnative/redash,rockwotj/redash,crowdworks/redash,stefanseifert/redash,getredash/redash,vishesh92/redash,chriszs/redash,crowdworks/redash,alexanderlz/redash,jmvasquez/redashtest,chriszs/redash,44px/redash,jmvasquez/redashtest,stefanseifert/redash,EverlyWell/redash,ninneko/redash,moritz9/redash,easytaxibr/redash,pubnative/redash,getredash/redash,guaguadev/redash,imsally/redash,akariv/redash,amino-data/redash,jmvasquez/redashtest,getredash/redash,akariv/redash,M32Media/redash,stefanseifert/redash,hudl/redash,akariv/redash,akariv/redash,pubnative/redash,rockwotj/redash,amino-data/redash,vishesh92/redash,easytaxibr/redash,rockwotj/redash,ninneko/redash,ninneko/redash,M32Media/redash,M32Media/redash,guaguadev/redash,44px/redash,vishesh92/redash,chriszs/redash,jmvasquez/redashtest,imsally/redash,vishesh92/redash,ninneko/redash,alexanderlz/redash,imsally/redash,useabode/redash,pubnative/redash,pubnative/redash,moritz9/redash,amino-data/redash,useabode/redash,imsally/redash,crowdworks/redash,moritz9/redash,EverlyWell/redash,denisov-vlad/redash,hudl/redash,amino-data/redash,getredash/redash,hudl/redash,moritz9/redash,44px/redash,guaguadev/redash,alexanderlz/redash,guaguadev/redash,denisov-vlad/redash,denisov-vlad/redash,stefanseifert/redash,chriszs/redash,easytaxibr/redash,EverlyWell/redash,akariv/redash,alexanderlz/redash,useabode/redash,useabode/redash
|
import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=0, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllersUse database number from redis url if available.
|
import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
if redis_url.path:
redis_db = redis_url.path[1]
else:
redis_db = 0
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers
|
<commit_before>import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=0, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers<commit_msg>Use database number from redis url if available.<commit_after>
|
# redash application bootstrap: Flask app, database, auth, JSON API
# representation, and the Redis-backed data manager.
import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
            template_folder=settings.STATIC_ASSETS_PATH,
            static_folder=settings.STATIC_ASSETS_PATH,
            static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
    """Render flask-restful responses as JSON using the project encoder."""
    resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
    resp.headers.extend(headers or {})
    return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
# Take the Redis database number from the URL path (e.g. redis://host/2).
if redis_url.path:
    # NOTE(review): path[1] keeps only the first character after '/', so a
    # multi-digit database such as '/12' becomes '1', and the value stays a
    # str -- confirm whether int(redis_url.path[1:]) was intended.
    redis_db = redis_url.path[1]
else:
    redis_db = 0
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers
|
# redash application bootstrap (older revision): Flask app, database, auth,
# JSON API representation, and the Redis-backed data manager.
import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
            template_folder=settings.STATIC_ASSETS_PATH,
            static_folder=settings.STATIC_ASSETS_PATH,
            static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
    """Render flask-restful responses as JSON using the project encoder."""
    resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
    resp.headers.extend(headers or {})
    return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
# NOTE(review): db is hard-coded to 0 here; any database number present in
# REDIS_URL's path is ignored.
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=0, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllersUse database number from redis url if available.import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
if redis_url.path:
redis_db = redis_url.path[1]
else:
redis_db = 0
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers
|
<commit_before>import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=0, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers<commit_msg>Use database number from redis url if available.<commit_after>import json
import urlparse
from flask import Flask, make_response
from flask.ext.restful import Api
from flask_peewee.db import Database
import redis
from redash import settings, utils
__version__ = '0.3.2'
app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATH,
static_folder=settings.STATIC_ASSETS_PATH,
static_path='/static')
api = Api(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
db = Database(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp
redis_url = urlparse.urlparse(settings.REDIS_URL)
if redis_url.path:
redis_db = redis_url.path[1]
else:
redis_db = 0
redis_connection = redis.StrictRedis(host=redis_url.hostname, port=redis_url.port, db=redis_db, password=redis_url.password)
from redash import data
data_manager = data.Manager(redis_connection, db)
from redash import controllers
|
86418b48ca9bef5d0cd7cbf8468abfad633b56ed
|
write_csv.py
|
write_csv.py
|
"""Export all responses from yesterday and save them to a CSV file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
# ISO-formatted date strings; 'yesterday' selects the rows to export.
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
# Column names of the responses table (not referenced below).
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
# Parameterized query avoids SQL injection/quoting issues.
c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
# NOTE(review): the handle returned by open() is never closed, and the
# cursor/connection are left open; consider 'with' blocks.
csvWriter = csv.writer(open(full_export_path, 'w'))
rows = c.fetchall()
for row in rows:
    csvWriter.writerow(row)
|
"""Export all responses from yesterday and save them to a .csv file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
# ISO-formatted date strings; 'yesterday' selects the rows to export.
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
# Column names of the responses table (not referenced below).
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
try:
    # Parameterized query avoids SQL injection/quoting issues.
    c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
    rows = c.fetchall()
    # Context manager guarantees the export file is closed (previously the
    # handle returned by open() was leaked).
    with open(full_export_path, 'w') as export_file:
        csvWriter = csv.writer(export_file)
        for row in rows:
            csvWriter.writerow(row)
except (sqlite3.Error, OSError):
    # Best-effort export: keep the original silent-skip behavior, but only
    # for database/filesystem errors instead of a bare 'except' that would
    # also hide programming errors.
    pass
finally:
    # Always release the database connection.
    conn.close()
|
Add try/except in case select function fails
|
Add try/except in case select function fails
|
Python
|
mit
|
andrewlrogers/srvy
|
"""Export all responses from yesterday and save them to a CSV file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
# Dates formatted as YYYY-MM-DD to match the stored date column.
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
# Column names of the responses table (not referenced below).
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
# Parameterized query avoids SQL injection/quoting issues.
c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
# NOTE(review): the open() handle, cursor and connection are never closed.
csvWriter = csv.writer(open(full_export_path, 'w'))
rows = c.fetchall()
for row in rows:
    csvWriter.writerow(row)
Add try/except in case select function fails
|
"""Export all responses from yesterday and save them to a .csv file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
# Dates formatted as YYYY-MM-DD to match the stored date column.
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
# Column names of the responses table (not referenced below).
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
try:
    # Parameterized query avoids SQL injection/quoting issues.
    c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
    # NOTE(review): the open() handle is never closed, and the bare
    # 'except' below hides every failure, including programming errors --
    # consider narrowing to (sqlite3.Error, OSError).
    csvWriter = csv.writer(open(full_export_path, 'w'))
    rows = c.fetchall()
    for row in rows:
        csvWriter.writerow(row)
except:
    pass
|
<commit_before>"""Export all responses from yesterday and save them to a CSV file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
csvWriter = csv.writer(open(full_export_path, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
<commit_msg>Add try/except in case select function fails<commit_after>
|
"""Export all responses from yesterday and save them to a .csv file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
# ISO-formatted date strings; 'yesterday' selects the rows to export.
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
# Column names of the responses table (not referenced below).
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
try:
    # Parameterized query avoids SQL injection/quoting issues.
    c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
    # NOTE(review): bare 'except: pass' hides all errors and the open()
    # handle is never closed.
    csvWriter = csv.writer(open(full_export_path, 'w'))
    rows = c.fetchall()
    for row in rows:
        csvWriter.writerow(row)
except:
    pass
|
"""Export all responses from yesterday and save them to a CSV file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
# ISO-formatted date strings; 'yesterday' selects the rows to export.
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
# Column names of the responses table (not referenced below).
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
# Parameterized query avoids SQL injection/quoting issues.
c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
# NOTE(review): the open() handle, cursor and connection are never closed.
csvWriter = csv.writer(open(full_export_path, 'w'))
rows = c.fetchall()
for row in rows:
    csvWriter.writerow(row)
Add try/except in case select function fails"""Export all responses from yesterday and save them to a .csv file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
try:
c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
csvWriter = csv.writer(open(full_export_path, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
except:
pass
|
<commit_before>"""Export all responses from yesterday and save them to a CSV file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
csvWriter = csv.writer(open(full_export_path, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
<commit_msg>Add try/except in case select function fails<commit_after>"""Export all responses from yesterday and save them to a .csv file."""
import sqlite3
import csv
from datetime import datetime, timedelta
import os
today = str(datetime.now().strftime('%Y-%m-%d'))
yesterday = str((datetime.now() - timedelta(days=1)).strftime('%Y-%m-%d'))
export_directory = 'export'
export_filename = 'srvy' + yesterday + '.csv'
full_export_path = os.path.join(export_directory, export_filename)
sqlite_file = 'srvy.db'
table_name = 'responses'
date_column = 'date'
time_column = 'time'
score_column = 'score'
question_column = 'question'
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
#c.execute("SELECT * FROM responses WHERE date LIKE '%"+ current_date +"%'")
try:
c.execute("SELECT * FROM responses WHERE date = ?", (yesterday,))
csvWriter = csv.writer(open(full_export_path, 'w'))
rows = c.fetchall()
for row in rows:
csvWriter.writerow(row)
except:
pass
|
35a046a61d0acc1d6b7a1c084077bdf9ed7ff720
|
tests/utils/parse_worksheet.py
|
tests/utils/parse_worksheet.py
|
import unittest

from utils import parse_worksheet


class TestParseWorksheet(unittest.TestCase):
    """Existence checks for the parse_worksheet module's expected API."""

    def test_open_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, '__open_worksheet'):
            # Message fixed: it previously said '__open_sheet', which does
            # not match the attribute actually being checked.
            self.fail('__open_worksheet should be defined.')

    def test_get_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__get_data'):
            self.fail('__get_data should be defined.')

    def test_write_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__write_data'):
            self.fail('__write_data should be defined.')

    def test_parse_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, 'parse_worksheet'):
            self.fail('parse_worksheet should be defined.')

    # The dangling 'test_parse_worksheet_function_calls_open_worksheet' stub
    # was removed: a def with an empty body is a SyntaxError, so this module
    # previously failed to import at all.


if __name__ == '__main__':
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
    unittest.TextTestRunner(verbosity=2).run(suite)
|
import unittest
from unittest.mock import patch
from utils import parse_worksheet
class TestParseWorksheet(unittest.TestCase):
    """Existence checks for the parse_worksheet module's expected API."""
    def test_open_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, '__open_worksheet'):
            self.fail('__open_worksheet should be defined.')
    def test_get_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__get_data'):
            self.fail('__get_data should be defined.')
    def test_write_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__write_data'):
            self.fail('__write_data should be defined.')
    def test_parse_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, 'parse_worksheet'):
            self.fail('parse_worksheet should be defined.')
if __name__ == '__main__':
    # Verbose runner so individual checks are listed when run directly.
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
    unittest.TextTestRunner(verbosity=2).run(suite)
|
Remove testing of private methods (other than that they exist)
|
Remove testing of private methods (other than that they exist)
|
Python
|
mit
|
jdgillespie91/trackerSpend,jdgillespie91/trackerSpend
|
import unittest
from utils import parse_worksheet
class TestParseWorksheet(unittest.TestCase):
    """Existence checks for the parse_worksheet module's expected API."""
    def test_open_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, '__open_worksheet'):
            # NOTE(review): message says '__open_sheet' but the attribute
            # checked is '__open_worksheet'.
            self.fail('__open_sheet should be defined.')
    def test_get_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__get_data'):
            self.fail('__get_data should be defined.')
    def test_write_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__write_data'):
            self.fail('__write_data should be defined.')
    def test_parse_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, 'parse_worksheet'):
            self.fail('parse_worksheet should be defined.')
    # NOTE(review): the def below has an empty body, which is a SyntaxError;
    # the module cannot be imported until it is removed or given a body.
    def test_parse_worksheet_function_calls_open_worksheet(self):
if __name__ == '__main__':
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
    unittest.TextTestRunner(verbosity=2).run(suite)
Remove testing of private methods (other than that they exist)
|
import unittest
from unittest.mock import patch
from utils import parse_worksheet


class TestParseWorksheet(unittest.TestCase):
    """Existence checks for the parse_worksheet module's expected API."""

    def _require(self, attr_name):
        """Fail the running test unless parse_worksheet defines attr_name."""
        if not hasattr(parse_worksheet, attr_name):
            self.fail('{0} should be defined.'.format(attr_name))

    def test_open_worksheet_function_is_defined(self):
        self._require('__open_worksheet')

    def test_get_data_function_is_defined(self):
        self._require('__get_data')

    def test_write_data_function_is_defined(self):
        self._require('__write_data')

    def test_parse_worksheet_function_is_defined(self):
        self._require('parse_worksheet')


if __name__ == '__main__':
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
    unittest.TextTestRunner(verbosity=2).run(suite)
|
<commit_before>import unittest
from utils import parse_worksheet
class TestParseWorksheet(unittest.TestCase):
def test_open_worksheet_function_is_defined(self):
if not hasattr(parse_worksheet, '__open_worksheet'):
self.fail('__open_sheet should be defined.')
def test_get_data_function_is_defined(self):
if not hasattr(parse_worksheet, '__get_data'):
self.fail('__get_data should be defined.')
def test_write_data_function_is_defined(self):
if not hasattr(parse_worksheet, '__write_data'):
self.fail('__write_data should be defined.')
def test_parse_worksheet_function_is_defined(self):
if not hasattr(parse_worksheet, 'parse_worksheet'):
self.fail('parse_worksheet should be defined.')
def test_parse_worksheet_function_calls_open_worksheet(self):
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Remove testing of private methods (other than that they exist)<commit_after>
|
import unittest
from unittest.mock import patch
from utils import parse_worksheet
class TestParseWorksheet(unittest.TestCase):
    """Checks that the expected parse_worksheet API surface exists."""
    def test_open_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, '__open_worksheet'):
            self.fail('__open_worksheet should be defined.')
    def test_get_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__get_data'):
            self.fail('__get_data should be defined.')
    def test_write_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__write_data'):
            self.fail('__write_data should be defined.')
    def test_parse_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, 'parse_worksheet'):
            self.fail('parse_worksheet should be defined.')
if __name__ == '__main__':
    # Verbose runner so individual checks are listed when run directly.
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
    unittest.TextTestRunner(verbosity=2).run(suite)
|
import unittest
from utils import parse_worksheet
class TestParseWorksheet(unittest.TestCase):
    """Checks that the expected parse_worksheet API surface exists."""
    def test_open_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, '__open_worksheet'):
            # NOTE(review): message/attribute mismatch ('__open_sheet').
            self.fail('__open_sheet should be defined.')
    def test_get_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__get_data'):
            self.fail('__get_data should be defined.')
    def test_write_data_function_is_defined(self):
        if not hasattr(parse_worksheet, '__write_data'):
            self.fail('__write_data should be defined.')
    def test_parse_worksheet_function_is_defined(self):
        if not hasattr(parse_worksheet, 'parse_worksheet'):
            self.fail('parse_worksheet should be defined.')
    # NOTE(review): empty def body below is a SyntaxError; the module
    # cannot be imported as written.
    def test_parse_worksheet_function_calls_open_worksheet(self):
if __name__ == '__main__':
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
    unittest.TextTestRunner(verbosity=2).run(suite)
Remove testing of private methods (other than that they exist)import unittest
from unittest.mock import patch
from utils import parse_worksheet
class TestParseWorksheet(unittest.TestCase):
def test_open_worksheet_function_is_defined(self):
if not hasattr(parse_worksheet, '__open_worksheet'):
self.fail('__open_worksheet should be defined.')
def test_get_data_function_is_defined(self):
if not hasattr(parse_worksheet, '__get_data'):
self.fail('__get_data should be defined.')
def test_write_data_function_is_defined(self):
if not hasattr(parse_worksheet, '__write_data'):
self.fail('__write_data should be defined.')
def test_parse_worksheet_function_is_defined(self):
if not hasattr(parse_worksheet, 'parse_worksheet'):
self.fail('parse_worksheet should be defined.')
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
unittest.TextTestRunner(verbosity=2).run(suite)
|
<commit_before>import unittest
from utils import parse_worksheet
class TestParseWorksheet(unittest.TestCase):
def test_open_worksheet_function_is_defined(self):
if not hasattr(parse_worksheet, '__open_worksheet'):
self.fail('__open_sheet should be defined.')
def test_get_data_function_is_defined(self):
if not hasattr(parse_worksheet, '__get_data'):
self.fail('__get_data should be defined.')
def test_write_data_function_is_defined(self):
if not hasattr(parse_worksheet, '__write_data'):
self.fail('__write_data should be defined.')
def test_parse_worksheet_function_is_defined(self):
if not hasattr(parse_worksheet, 'parse_worksheet'):
self.fail('parse_worksheet should be defined.')
def test_parse_worksheet_function_calls_open_worksheet(self):
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
unittest.TextTestRunner(verbosity=2).run(suite)
<commit_msg>Remove testing of private methods (other than that they exist)<commit_after>import unittest
from unittest.mock import patch
from utils import parse_worksheet
class TestParseWorksheet(unittest.TestCase):
def test_open_worksheet_function_is_defined(self):
if not hasattr(parse_worksheet, '__open_worksheet'):
self.fail('__open_worksheet should be defined.')
def test_get_data_function_is_defined(self):
if not hasattr(parse_worksheet, '__get_data'):
self.fail('__get_data should be defined.')
def test_write_data_function_is_defined(self):
if not hasattr(parse_worksheet, '__write_data'):
self.fail('__write_data should be defined.')
def test_parse_worksheet_function_is_defined(self):
if not hasattr(parse_worksheet, 'parse_worksheet'):
self.fail('parse_worksheet should be defined.')
if __name__ == '__main__':
suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestParseWorksheet)
unittest.TextTestRunner(verbosity=2).run(suite)
|
fa7228e26791987e43fdcb216f98658b45a8b220
|
slave/skia_slave_scripts/run_gm.py
|
slave/skia_slave_scripts/run_gm.py
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
# Name of the JSON results summary written alongside the GM images.
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
  def _Run(self):
    """Build the gm command line and run it via the platform flavor."""
    output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
    cmd = ['--writePath', output_dir,
           '--writeJsonSummaryPath', os.path.join(output_dir,
                                                  JSON_SUMMARY_FILENAME),
           ] + self._gm_args
    # msaa16 is flaky on Macs (driver bug?) so we skip the test for now
    if sys.platform == 'darwin':
      # NOTE(review): gm's flag is spelled '--excludeConfig';
      # '--exclude-config' appears to be the wrong spelling -- confirm.
      cmd.extend(['--exclude-config', 'msaa16'])
    self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
  sys.exit(BuildStep.RunBuildStep(RunGM))
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
# Name of the JSON results summary written alongside the GM images.
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
  def _Run(self):
    """Assemble the gm argument list and run it via the platform flavor."""
    output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
    summary_path = os.path.join(output_dir, JSON_SUMMARY_FILENAME)
    cmd = ['--writePath', output_dir, '--writeJsonSummaryPath', summary_path]
    cmd += self._gm_args
    # msaa16 is flaky on Macs (driver bug?) so we skip the test for now
    if sys.platform == 'darwin':
      cmd += ['--excludeConfig', 'msaa16']
    self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
  sys.exit(BuildStep.RunBuildStep(RunGM))
|
Fix buildbot flag to GM
|
Fix buildbot flag to GM
Unreviewed
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@8282 2bbb7eff-a529-9590-31e7-b0007b416f81
|
Python
|
bsd-3-clause
|
google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot,Tiger66639/skia-buildbot,google/skia-buildbot,google/skia-buildbot,Tiger66639/skia-buildbot
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
# Name of the JSON results summary written alongside the GM images.
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
  def _Run(self):
    """Build the gm command line and run it via the platform flavor."""
    output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
    cmd = ['--writePath', output_dir,
           '--writeJsonSummaryPath', os.path.join(output_dir,
                                                  JSON_SUMMARY_FILENAME),
           ] + self._gm_args
    # msaa16 is flaky on Macs (driver bug?) so we skip the test for now
    if sys.platform == 'darwin':
      # NOTE(review): '--exclude-config' looks misspelled; gm expects
      # '--excludeConfig' -- confirm.
      cmd.extend(['--exclude-config', 'msaa16'])
    self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
  sys.exit(BuildStep.RunBuildStep(RunGM))
Fix buildbot flag to GM
Unreviewed
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@8282 2bbb7eff-a529-9590-31e7-b0007b416f81
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
# Name of the JSON results summary written alongside the GM images.
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
  def _Run(self):
    """Build the gm command line and run it via the platform flavor."""
    output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
    cmd = ['--writePath', output_dir,
           '--writeJsonSummaryPath', os.path.join(output_dir,
                                                  JSON_SUMMARY_FILENAME),
           ] + self._gm_args
    # msaa16 is flaky on Macs (driver bug?) so we skip the test for now
    if sys.platform == 'darwin':
      cmd.extend(['--excludeConfig', 'msaa16'])
    self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
  sys.exit(BuildStep.RunBuildStep(RunGM))
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
def _Run(self):
output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
cmd = ['--writePath', output_dir,
'--writeJsonSummaryPath', os.path.join(output_dir,
JSON_SUMMARY_FILENAME),
] + self._gm_args
# msaa16 is flaky on Macs (driver bug?) so we skip the test for now
if sys.platform == 'darwin':
cmd.extend(['--exclude-config', 'msaa16'])
self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RunGM))
<commit_msg>Fix buildbot flag to GM
Unreviewed
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@8282 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
# Name of the JSON results summary written alongside the GM images.
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
  def _Run(self):
    """Assemble and run the gm command via the platform flavor."""
    output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
    cmd = ['--writePath', output_dir,
           '--writeJsonSummaryPath', os.path.join(output_dir,
                                                  JSON_SUMMARY_FILENAME),
           ] + self._gm_args
    # msaa16 is flaky on Macs (driver bug?) so we skip the test for now
    if sys.platform == 'darwin':
      cmd.extend(['--excludeConfig', 'msaa16'])
    self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
  sys.exit(BuildStep.RunBuildStep(RunGM))
|
#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
# Name of the JSON results summary written alongside the GM images.
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
  def _Run(self):
    """Assemble and run the gm command via the platform flavor."""
    output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
    cmd = ['--writePath', output_dir,
           '--writeJsonSummaryPath', os.path.join(output_dir,
                                                  JSON_SUMMARY_FILENAME),
           ] + self._gm_args
    # msaa16 is flaky on Macs (driver bug?) so we skip the test for now
    if sys.platform == 'darwin':
      # NOTE(review): gm expects '--excludeConfig'; this spelling is
      # likely ignored or rejected -- confirm.
      cmd.extend(['--exclude-config', 'msaa16'])
    self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
  sys.exit(BuildStep.RunBuildStep(RunGM))
Fix buildbot flag to GM
Unreviewed
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@8282 2bbb7eff-a529-9590-31e7-b0007b416f81#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
def _Run(self):
output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
cmd = ['--writePath', output_dir,
'--writeJsonSummaryPath', os.path.join(output_dir,
JSON_SUMMARY_FILENAME),
] + self._gm_args
# msaa16 is flaky on Macs (driver bug?) so we skip the test for now
if sys.platform == 'darwin':
cmd.extend(['--excludeConfig', 'msaa16'])
self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RunGM))
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
def _Run(self):
output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
cmd = ['--writePath', output_dir,
'--writeJsonSummaryPath', os.path.join(output_dir,
JSON_SUMMARY_FILENAME),
] + self._gm_args
# msaa16 is flaky on Macs (driver bug?) so we skip the test for now
if sys.platform == 'darwin':
cmd.extend(['--exclude-config', 'msaa16'])
self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RunGM))
<commit_msg>Fix buildbot flag to GM
Unreviewed
git-svn-id: 32fc27f4dcfb6c0385cd9719852b95fe6680452d@8282 2bbb7eff-a529-9590-31e7-b0007b416f81<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Run the Skia GM executable. """
from build_step import BuildStep
import os
import sys
JSON_SUMMARY_FILENAME = 'actual-results.json'
class RunGM(BuildStep):
def _Run(self):
output_dir = os.path.join(self._device_dirs.GMDir(), self._gm_image_subdir)
cmd = ['--writePath', output_dir,
'--writeJsonSummaryPath', os.path.join(output_dir,
JSON_SUMMARY_FILENAME),
] + self._gm_args
# msaa16 is flaky on Macs (driver bug?) so we skip the test for now
if sys.platform == 'darwin':
cmd.extend(['--excludeConfig', 'msaa16'])
self.RunFlavoredCmd('gm', cmd)
if '__main__' == __name__:
sys.exit(BuildStep.RunBuildStep(RunGM))
|
e5940200612b293b1cecff4c6a683ecefa684345
|
dirMonitor.py
|
dirMonitor.py
|
#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s %13s %13s"
fmtd = "%*s %13d %13d %13d"
n = 0
while True:
print fmts % (15, "dir", "size", "avg", "max")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename)) for dirpath, dirnames, filenames in os.walk( folder ) for filename in filenames )
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmtd % (15, folder, bytes, avg, maxSize[folder])
print ""
time.sleep(1)
|
#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
nameWidth = max([len(f) for f in folders])
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s%s %13s%s %13s%s"
n = 0
while True:
print fmts % (nameWidth, "directory", "curr size", " ", "avg size", " ", "max size", " ")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename))
for dirpath, dirnames, filenames in os.walk( folder )
for filename in filenames )
oldSize = currSize[folder]
oldAvg = 1 if n == 1 else totalSize[folder]/(n-1)
oldMax = maxSize[folder]
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmts % (nameWidth, folder,
"{:,}".format(bytes), "+" if bytes > oldSize else "-" if bytes < oldSize else " ",
"{:,}".format(avg), "+" if avg > oldAvg else "-" if avg < oldAvg else " ",
"{:,}".format(maxSize[folder]), "+" if maxSize[folder] > oldMax else " ")
print ""
time.sleep(2)
|
Add formatting to directory monitor.
|
Add formatting to directory monitor.
|
Python
|
apache-2.0
|
jskora/scratch-nifi,jskora/scratch-nifi,jskora/scratch-nifi
|
#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s %13s %13s"
fmtd = "%*s %13d %13d %13d"
n = 0
while True:
print fmts % (15, "dir", "size", "avg", "max")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename)) for dirpath, dirnames, filenames in os.walk( folder ) for filename in filenames )
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmtd % (15, folder, bytes, avg, maxSize[folder])
print ""
time.sleep(1)
Add formatting to directory monitor.
|
#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
nameWidth = max([len(f) for f in folders])
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s%s %13s%s %13s%s"
n = 0
while True:
print fmts % (nameWidth, "directory", "curr size", " ", "avg size", " ", "max size", " ")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename))
for dirpath, dirnames, filenames in os.walk( folder )
for filename in filenames )
oldSize = currSize[folder]
oldAvg = 1 if n == 1 else totalSize[folder]/(n-1)
oldMax = maxSize[folder]
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmts % (nameWidth, folder,
"{:,}".format(bytes), "+" if bytes > oldSize else "-" if bytes < oldSize else " ",
"{:,}".format(avg), "+" if avg > oldAvg else "-" if avg < oldAvg else " ",
"{:,}".format(maxSize[folder]), "+" if maxSize[folder] > oldMax else " ")
print ""
time.sleep(2)
|
<commit_before>#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s %13s %13s"
fmtd = "%*s %13d %13d %13d"
n = 0
while True:
print fmts % (15, "dir", "size", "avg", "max")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename)) for dirpath, dirnames, filenames in os.walk( folder ) for filename in filenames )
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmtd % (15, folder, bytes, avg, maxSize[folder])
print ""
time.sleep(1)
<commit_msg>Add formatting to directory monitor.<commit_after>
|
#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
nameWidth = max([len(f) for f in folders])
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s%s %13s%s %13s%s"
n = 0
while True:
print fmts % (nameWidth, "directory", "curr size", " ", "avg size", " ", "max size", " ")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename))
for dirpath, dirnames, filenames in os.walk( folder )
for filename in filenames )
oldSize = currSize[folder]
oldAvg = 1 if n == 1 else totalSize[folder]/(n-1)
oldMax = maxSize[folder]
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmts % (nameWidth, folder,
"{:,}".format(bytes), "+" if bytes > oldSize else "-" if bytes < oldSize else " ",
"{:,}".format(avg), "+" if avg > oldAvg else "-" if avg < oldAvg else " ",
"{:,}".format(maxSize[folder]), "+" if maxSize[folder] > oldMax else " ")
print ""
time.sleep(2)
|
#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s %13s %13s"
fmtd = "%*s %13d %13d %13d"
n = 0
while True:
print fmts % (15, "dir", "size", "avg", "max")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename)) for dirpath, dirnames, filenames in os.walk( folder ) for filename in filenames )
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmtd % (15, folder, bytes, avg, maxSize[folder])
print ""
time.sleep(1)
Add formatting to directory monitor.#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
nameWidth = max([len(f) for f in folders])
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s%s %13s%s %13s%s"
n = 0
while True:
print fmts % (nameWidth, "directory", "curr size", " ", "avg size", " ", "max size", " ")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename))
for dirpath, dirnames, filenames in os.walk( folder )
for filename in filenames )
oldSize = currSize[folder]
oldAvg = 1 if n == 1 else totalSize[folder]/(n-1)
oldMax = maxSize[folder]
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmts % (nameWidth, folder,
"{:,}".format(bytes), "+" if bytes > oldSize else "-" if bytes < oldSize else " ",
"{:,}".format(avg), "+" if avg > oldAvg else "-" if avg < oldAvg else " ",
"{:,}".format(maxSize[folder]), "+" if maxSize[folder] > oldMax else " ")
print ""
time.sleep(2)
|
<commit_before>#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s %13s %13s"
fmtd = "%*s %13d %13d %13d"
n = 0
while True:
print fmts % (15, "dir", "size", "avg", "max")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename)) for dirpath, dirnames, filenames in os.walk( folder ) for filename in filenames )
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmtd % (15, folder, bytes, avg, maxSize[folder])
print ""
time.sleep(1)
<commit_msg>Add formatting to directory monitor.<commit_after>#!/usr/bin/env python
import os, sys, time
folders = sys.argv[1:]
nameWidth = max([len(f) for f in folders])
currSize = dict((x, 0) for x in folders)
totalSize = dict((x, 0) for x in folders)
maxSize = dict((x, 0) for x in folders)
fmts = "%*s %13s%s %13s%s %13s%s"
n = 0
while True:
print fmts % (nameWidth, "directory", "curr size", " ", "avg size", " ", "max size", " ")
n += 1
for folder in folders:
bytes = sum( os.path.getsize(os.path.join(dirpath, filename))
for dirpath, dirnames, filenames in os.walk( folder )
for filename in filenames )
oldSize = currSize[folder]
oldAvg = 1 if n == 1 else totalSize[folder]/(n-1)
oldMax = maxSize[folder]
currSize[folder] = bytes
totalSize[folder] += bytes
maxSize[folder] = max(maxSize[folder], bytes)
avg = totalSize[folder] / n
print fmts % (nameWidth, folder,
"{:,}".format(bytes), "+" if bytes > oldSize else "-" if bytes < oldSize else " ",
"{:,}".format(avg), "+" if avg > oldAvg else "-" if avg < oldAvg else " ",
"{:,}".format(maxSize[folder]), "+" if maxSize[folder] > oldMax else " ")
print ""
time.sleep(2)
|
e188e324f1c7b8afab3b65b9e7337a0b1c3981f0
|
docs/conf.py
|
docs/conf.py
|
#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('https://tornadoweb.org/en/latest/', None),
}
|
#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('http://www.tornadoweb.org/en/latest/', None),
}
|
Correct inter sphinx path to Tornado docs.
|
Correct inter sphinx path to Tornado docs.
|
Python
|
bsd-3-clause
|
sprockets/sprockets.mixins.cors
|
#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('https://tornadoweb.org/en/latest/', None),
}
Correct inter sphinx path to Tornado docs.
|
#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('http://www.tornadoweb.org/en/latest/', None),
}
|
<commit_before>#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('https://tornadoweb.org/en/latest/', None),
}
<commit_msg>Correct inter sphinx path to Tornado docs.<commit_after>
|
#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('http://www.tornadoweb.org/en/latest/', None),
}
|
#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('https://tornadoweb.org/en/latest/', None),
}
Correct inter sphinx path to Tornado docs.#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('http://www.tornadoweb.org/en/latest/', None),
}
|
<commit_before>#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('https://tornadoweb.org/en/latest/', None),
}
<commit_msg>Correct inter sphinx path to Tornado docs.<commit_after>#!/usr/bin/env python
import alabaster
from sprockets.mixins import cors
project = 'sprockets.mixins.cors'
copyright = '2015, AWeber Communication, Inc.'
version = cors.__version__
release = '.'.join(str(v) for v in cors.version_info[0:2])
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.httpdomain',
]
templates_path = []
source_suffix = '.rst'
master_doc = 'index'
exclude_patterns = []
pygments_style = 'sphinx'
html_theme = 'alabaster'
html_style = 'custom.css'
html_theme_path = [alabaster.get_path()]
html_static_path = ['static']
html_sidebars = {
'**': ['about.html', 'navigation.html'],
}
html_theme_options = {
'github_user': 'sprockets',
'github_repo': 'sprockets.mixins.cors',
'description': 'Tornado CORS helper',
'github_banner': True,
'travis_button': True,
}
intersphinx_mapping = {
'python': ('https://docs.python.org/', None),
'tornado': ('http://www.tornadoweb.org/en/latest/', None),
}
|
b13baaa37133b7d4fe46682dbce7ed94d46ecaf4
|
docs/conf.py
|
docs/conf.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
|
Add viewcode to sphinx docs
|
Add viewcode to sphinx docs
|
Python
|
mit
|
rosswhitfield/javelin
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
Add viewcode to sphinx docs
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
<commit_msg>Add viewcode to sphinx docs<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
Add viewcode to sphinx docs#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
|
<commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
<commit_msg>Add viewcode to sphinx docs<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode'
]
source_suffix = '.rst'
master_doc = 'index'
project = 'Javelin'
copyright = '2017, Ross Whitfield'
author = 'Ross Whitfield'
version = '0.1.0'
release = '0.1.0'
exclude_patterns = ['_build']
pygments_style = 'friendly'
html_theme = 'sphinx_rtd_theme'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Javelindoc'
latex_documents = [
(master_doc, 'Javelin.tex', 'Javelin Documentation',
'Ross Whitfield', 'manual'),
]
intersphinx_mapping = {'numpy': ('https://docs.scipy.org/doc/numpy/', None),
'xarray': ('http://xarray.pydata.org/en/stable/', None),
'ase': ('https://wiki.fysik.dtu.dk/ase/', None),
'diffpy.Structure': ('http://www.diffpy.org/diffpy.structure/', None)}
autodoc_default_flags = ['members', 'undoc-members']
|
88f02fbea11390ec8866c29912ed8beadc31e736
|
admin/common_auth/forms.py
|
admin/common_auth/forms.py
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(name__startswith='collections_'),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
from __future__ import absolute_import
from django import forms
from django.db.models import Q
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='preprint_')),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
Exclude preprints from queryset from account/register in the admin app.
|
Exclude preprints from queryset from account/register in the admin app.
|
Python
|
apache-2.0
|
adlius/osf.io,mfraezz/osf.io,cslzchen/osf.io,mfraezz/osf.io,baylee-d/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,pattisdr/osf.io,aaxelb/osf.io,adlius/osf.io,adlius/osf.io,felliott/osf.io,felliott/osf.io,mfraezz/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,mattclark/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,cslzchen/osf.io,felliott/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,mattclark/osf.io,aaxelb/osf.io,saradbowman/osf.io,cslzchen/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(name__startswith='collections_'),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
Exclude preprints from queryset from account/register in the admin app.
|
from __future__ import absolute_import
from django import forms
from django.db.models import Q
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='preprint_')),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
<commit_before>from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(name__startswith='collections_'),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
<commit_msg>Exclude preprints from queryset from account/register in the admin app.<commit_after>
|
from __future__ import absolute_import
from django import forms
from django.db.models import Q
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='preprint_')),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(name__startswith='collections_'),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
Exclude preprints from queryset from account/register in the admin app.from __future__ import absolute_import
from django import forms
from django.db.models import Q
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='preprint_')),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
<commit_before>from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(name__startswith='collections_'),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
<commit_msg>Exclude preprints from queryset from account/register in the admin app.<commit_after>from __future__ import absolute_import
from django import forms
from django.db.models import Q
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='preprint_')),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
|
d12fecd2eb012862b8d7654c879dccf5ccce833f
|
jose/backends/__init__.py
|
jose/backends/__init__.py
|
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
|
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
try:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
except ImportError:
from jose.backends.rsa_backend import RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
|
Enable Python RSA backend as a fallback.
|
Enable Python RSA backend as a fallback.
|
Python
|
mit
|
mpdavis/python-jose
|
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
Enable Python RSA backend as a fallback.
|
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
try:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
except ImportError:
from jose.backends.rsa_backend import RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
|
<commit_before>
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
<commit_msg>Enable Python RSA backend as a fallback.<commit_after>
|
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
try:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
except ImportError:
from jose.backends.rsa_backend import RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
|
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
Enable Python RSA backend as a fallback.
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
try:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
except ImportError:
from jose.backends.rsa_backend import RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
|
<commit_before>
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
<commit_msg>Enable Python RSA backend as a fallback.<commit_after>
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
try:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
except ImportError:
from jose.backends.rsa_backend import RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
|
69e8798137ca63b78adf0c41582e89973d2ea129
|
create.py
|
create.py
|
import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
try:
util_functions.create_directory(model_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, args.model_path)
results['created']=True
except:
results['errors'].append("could not write model to: {0}".format(model_path))
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
|
import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
full_path=os.path.join(base_path,model_path)
util_functions.create_directory(full_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, full_path)
results['created']=True
"""
except:
results['errors'].append("could not write model to: {0}".format(model_path))
"""
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
|
Work on model file handling
|
Work on model file handling
|
Python
|
agpl-3.0
|
edx/ease,edx/ease
|
import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
try:
util_functions.create_directory(model_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, args.model_path)
results['created']=True
except:
results['errors'].append("could not write model to: {0}".format(model_path))
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
Work on model file handling
|
import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
full_path=os.path.join(base_path,model_path)
util_functions.create_directory(full_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, full_path)
results['created']=True
"""
except:
results['errors'].append("could not write model to: {0}".format(model_path))
"""
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
|
<commit_before>import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
try:
util_functions.create_directory(model_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, args.model_path)
results['created']=True
except:
results['errors'].append("could not write model to: {0}".format(model_path))
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
<commit_msg>Work on model file handling<commit_after>
|
import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
full_path=os.path.join(base_path,model_path)
util_functions.create_directory(full_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, full_path)
results['created']=True
"""
except:
results['errors'].append("could not write model to: {0}".format(model_path))
"""
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
|
import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
try:
util_functions.create_directory(model_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, args.model_path)
results['created']=True
except:
results['errors'].append("could not write model to: {0}".format(model_path))
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
Work on model file handlingimport os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
full_path=os.path.join(base_path,model_path)
util_functions.create_directory(full_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, full_path)
results['created']=True
"""
except:
results['errors'].append("could not write model to: {0}".format(model_path))
"""
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
|
<commit_before>import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
try:
util_functions.create_directory(model_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, args.model_path)
results['created']=True
except:
results['errors'].append("could not write model to: {0}".format(model_path))
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
<commit_msg>Work on model file handling<commit_after>import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text,score,prompt_string,model_path):
model_path=util_functions.create_model_path(model_path)
results = {'errors': [],'created' : False}
try:
e_set = model_creator.create_essay_set(text, score, prompt_string)
except:
results['errors'].append("essay set creation failed.")
try:
feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
except:
results['errors'].append("feature extraction and model creation failed.")
full_path=os.path.join(base_path,model_path)
util_functions.create_directory(full_path)
model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, full_path)
results['created']=True
"""
except:
results['errors'].append("could not write model to: {0}".format(model_path))
"""
return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
|
cb97f453284658da56d12ab696ef6b7d7991c727
|
dipy/io/tests/test_csareader.py
|
dipy/io/tests/test_csareader.py
|
""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
print csa_info
|
""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
yield assert_equal(tags['NumberOfImagesInMosaic']['value'],
'48')
|
TEST - add test for value
|
TEST - add test for value
|
Python
|
bsd-3-clause
|
jyeatman/dipy,beni55/dipy,samuelstjean/dipy,FrancoisRheaultUS/dipy,demianw/dipy,demianw/dipy,nilgoyyou/dipy,jyeatman/dipy,Messaoud-Boudjada/dipy,maurozucchelli/dipy,Messaoud-Boudjada/dipy,StongeEtienne/dipy,villalonreina/dipy,JohnGriffiths/dipy,rfdougherty/dipy,villalonreina/dipy,sinkpoint/dipy,JohnGriffiths/dipy,FrancoisRheaultUS/dipy,maurozucchelli/dipy,oesteban/dipy,sinkpoint/dipy,samuelstjean/dipy,samuelstjean/dipy,StongeEtienne/dipy,mdesco/dipy,rfdougherty/dipy,oesteban/dipy,matthieudumont/dipy,matthieudumont/dipy,beni55/dipy,maurozucchelli/dipy,mdesco/dipy,nilgoyyou/dipy
|
""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
print csa_info
TEST - add test for value
|
""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
yield assert_equal(tags['NumberOfImagesInMosaic']['value'],
'48')
|
<commit_before>""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
print csa_info
<commit_msg>TEST - add test for value<commit_after>
|
""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
yield assert_equal(tags['NumberOfImagesInMosaic']['value'],
'48')
|
""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
print csa_info
TEST - add test for value""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
yield assert_equal(tags['NumberOfImagesInMosaic']['value'],
'48')
|
<commit_before>""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
print csa_info
<commit_msg>TEST - add test for value<commit_after>""" Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
yield assert_equal(tags['NumberOfImagesInMosaic']['value'],
'48')
|
63ca4ab4fc7237a9b32d82d73160b7f02c3ac133
|
settings.py
|
settings.py
|
# coding: utf-8
import os.path
import yaml
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
|
# coding: utf-8
import os.path
import yaml
import logging
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
logger = logging.getLogger(__name__)
try:
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
except FileNotFoundError:
logging.error('Config file was not found: settings.yaml')
logging.error('You must create it first')
exit(1)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
|
Add "config file was not fount" error handler
|
Add "config file was not fount" error handler
|
Python
|
mit
|
vv-p/jira-reports,vv-p/jira-reports
|
# coding: utf-8
import os.path
import yaml
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
Add "config file was not fount" error handler
|
# coding: utf-8
import os.path
import yaml
import logging
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
logger = logging.getLogger(__name__)
try:
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
except FileNotFoundError:
logging.error('Config file was not found: settings.yaml')
logging.error('You must create it first')
exit(1)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
|
<commit_before># coding: utf-8
import os.path
import yaml
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
<commit_msg>Add "config file was not fount" error handler<commit_after>
|
# coding: utf-8
import os.path
import yaml
import logging
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
logger = logging.getLogger(__name__)
try:
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
except FileNotFoundError:
logging.error('Config file was not found: settings.yaml')
logging.error('You must create it first')
exit(1)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
|
# coding: utf-8
import os.path
import yaml
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
Add "config file was not fount" error handler# coding: utf-8
import os.path
import yaml
import logging
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
logger = logging.getLogger(__name__)
try:
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
except FileNotFoundError:
logging.error('Config file was not found: settings.yaml')
logging.error('You must create it first')
exit(1)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
|
<commit_before># coding: utf-8
import os.path
import yaml
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
<commit_msg>Add "config file was not fount" error handler<commit_after># coding: utf-8
import os.path
import yaml
import logging
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
logger = logging.getLogger(__name__)
try:
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
except FileNotFoundError:
logging.error('Config file was not found: settings.yaml')
logging.error('You must create it first')
exit(1)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
|
0ad6cb338bbf10c48049d5649b5cd41eab0ed8d1
|
prawcore/sessions.py
|
prawcore/sessions.py
|
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self):
"""Preprare the connection to reddit's API."""
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session():
"""Return a :class:`Session` instance."""
return Session()
|
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
|
Add optional authorizer parameter to session class and function.
|
Add optional authorizer parameter to session class and function.
|
Python
|
bsd-2-clause
|
praw-dev/prawcore
|
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self):
"""Preprare the connection to reddit's API."""
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session():
"""Return a :class:`Session` instance."""
return Session()
Add optional authorizer parameter to session class and function.
|
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
|
<commit_before>"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self):
"""Preprare the connection to reddit's API."""
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session():
"""Return a :class:`Session` instance."""
return Session()
<commit_msg>Add optional authorizer parameter to session class and function.<commit_after>
|
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
|
"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self):
"""Preprare the connection to reddit's API."""
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session():
"""Return a :class:`Session` instance."""
return Session()
Add optional authorizer parameter to session class and function."""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
|
<commit_before>"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self):
"""Preprare the connection to reddit's API."""
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session():
"""Return a :class:`Session` instance."""
return Session()
<commit_msg>Add optional authorizer parameter to session class and function.<commit_after>"""prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
|
4fa8f7cb8a0592ed1d37efa20fd4a23d12e88713
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
#
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 0.9.3'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# 'Near' can contain trailing whitespace, which we avoid capturing
# Warning: commas must be followed by a space for readability
^((?P<warning>warning)|(?P<error>Error)):\s*(?P<message>.+)$\s*
# File: /path/to/file/example.styl
^.*$\s*
# Line: 46: color rgba(0,0,0,.5)
^Line:\s*(?P<line>\d+):\s*(?P<near>.*\S)
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.6, < 1.6.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
(?P<line>\d+):(?P<near>\d+)\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
Update regexp due to changes in stylint
|
Update regexp due to changes in stylint
|
Python
|
mit
|
jackbrewer/SublimeLinter-contrib-stylint
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
#
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 0.9.3'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# 'Near' can contain trailing whitespace, which we avoid capturing
# Warning: commas must be followed by a space for readability
^((?P<warning>warning)|(?P<error>Error)):\s*(?P<message>.+)$\s*
# File: /path/to/file/example.styl
^.*$\s*
# Line: 46: color rgba(0,0,0,.5)
^Line:\s*(?P<line>\d+):\s*(?P<near>.*\S)
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
Update regexp due to changes in stylint
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.6, < 1.6.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
(?P<line>\d+):(?P<near>\d+)\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
#
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 0.9.3'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# 'Near' can contain trailing whitespace, which we avoid capturing
# Warning: commas must be followed by a space for readability
^((?P<warning>warning)|(?P<error>Error)):\s*(?P<message>.+)$\s*
# File: /path/to/file/example.styl
^.*$\s*
# Line: 46: color rgba(0,0,0,.5)
^Line:\s*(?P<line>\d+):\s*(?P<near>.*\S)
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
<commit_msg>Update regexp due to changes in stylint<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.6, < 1.6.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
(?P<line>\d+):(?P<near>\d+)\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
#
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 0.9.3'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# 'Near' can contain trailing whitespace, which we avoid capturing
# Warning: commas must be followed by a space for readability
^((?P<warning>warning)|(?P<error>Error)):\s*(?P<message>.+)$\s*
# File: /path/to/file/example.styl
^.*$\s*
# Line: 46: color rgba(0,0,0,.5)
^Line:\s*(?P<line>\d+):\s*(?P<near>.*\S)
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
Update regexp due to changes in stylint#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.6, < 1.6.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
(?P<line>\d+):(?P<near>\d+)\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
#
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 0.9.3'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# 'Near' can contain trailing whitespace, which we avoid capturing
# Warning: commas must be followed by a space for readability
^((?P<warning>warning)|(?P<error>Error)):\s*(?P<message>.+)$\s*
# File: /path/to/file/example.styl
^.*$\s*
# Line: 46: color rgba(0,0,0,.5)
^Line:\s*(?P<line>\d+):\s*(?P<near>.*\S)
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
<commit_msg>Update regexp due to changes in stylint<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.6, < 1.6.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
(?P<line>\d+):(?P<near>\d+)\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
266e0976ee41e4dd1a9c543c84d422a8fba61230
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
|
Check if epages6 settings are configured
|
Check if epages6 settings are configured
|
Python
|
mit
|
ePages-rnd/SublimeLinter-contrib-tlec
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
Check if epages6 settings are configured
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
<commit_msg>Check if epages6 settings are configured<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
Check if epages6 settings are configured#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
<commit_msg>Check if epages6 settings are configured<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
"""Provides an interface to tlec."""
def cmd(self):
if self.view.settings().get('ep6vm'):
return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
else:
return []
executable = 'python3'
syntax = ('html', 'tle')
regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
error_stream = util.STREAM_BOTH
tempfile_suffix = 'html'
|
fe35867409af3bdf9898b68ce356ef00b865ff29
|
__openerp__.py
|
__openerp__.py
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
Change version to 1.0.2 (stable)
|
Change version to 1.0.2 (stable)
|
Python
|
agpl-3.0
|
xcgd/account_credit_transfer
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
Change version to 1.0.2 (stable)
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
<commit_before># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
<commit_msg>Change version to 1.0.2 (stable)<commit_after>
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
Change version to 1.0.2 (stable)# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
<commit_before># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
<commit_msg>Change version to 1.0.2 (stable)<commit_after># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.2",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
563f2e153437e7f78e05ed9dade1bd1690bef6a5
|
karspexet/ticket/admin.py
|
karspexet/ticket/admin.py
|
from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
|
from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'session_timeout', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
|
Add session_timeout to ReservationAdmin list_display
|
Add session_timeout to ReservationAdmin list_display
|
Python
|
mit
|
Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet,Karspexet/Karspexet
|
from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
Add session_timeout to ReservationAdmin list_display
|
from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'session_timeout', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
|
<commit_before>from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
<commit_msg>Add session_timeout to ReservationAdmin list_display<commit_after>
|
from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'session_timeout', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
|
from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
Add session_timeout to ReservationAdmin list_displayfrom django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'session_timeout', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
|
<commit_before>from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
<commit_msg>Add session_timeout to ReservationAdmin list_display<commit_after>from django.contrib import admin
from karspexet.ticket.models import Account, Reservation, Ticket, Voucher, PricingModel
class ReservationAdmin(admin.ModelAdmin):
list_display = ('show', 'total', 'finalized', 'reservation_code', 'session_timeout', 'tickets')
list_filter = ('finalized', 'show')
class TicketAdmin(admin.ModelAdmin):
list_display = ('price', 'ticket_type', 'show', 'seat', 'account', 'ticket_code')
class VoucherAdmin(admin.ModelAdmin):
list_display = ('amount', 'code', 'expiry_date', 'created_by')
list_filter = ('expiry_date', 'created_by')
class PricingModelAdmin(admin.ModelAdmin):
list_display = ('seating_group', 'prices', 'valid_from')
class AccountAdmin(admin.ModelAdmin):
list_display = ('name', 'email', 'phone')
admin.site.register(Reservation, ReservationAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(Voucher, VoucherAdmin)
admin.site.register(PricingModel, PricingModelAdmin)
admin.site.register(Account, AccountAdmin)
|
24f1f686c5cdc9a2272adbea7d1c2e1eb481dc8d
|
tests/unit/fakes.py
|
tests/unit/fakes.py
|
# Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
# Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
Make vertical white space after license header consistent
|
Trivial: Make vertical white space after license header consistent
Vertical white space between license header and the actual code is not consistent
across files. It looks like majority of the files leaves a single blank line
after license header. So make it consistent except for those exceptional cases
where the actual code starts with a function or class definition.
Change-Id: If6679ab19dc2d7a9df93e076db3019abc1e5b86b
|
Python
|
apache-2.0
|
varunarya10/oslo.i18n,openstack/oslo.i18n
|
# Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
Trivial: Make vertical white space after license header consistent
Vertical white space between license header and the actual code is not consistent
across files. It looks like majority of the files leaves a single blank line
after license header. So make it consistent except for those exceptional cases
where the actual code starts with a function or class definition.
Change-Id: If6679ab19dc2d7a9df93e076db3019abc1e5b86b
|
# Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
<commit_before># Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
<commit_msg>Trivial: Make vertical white space after license header consistent
Vertical white space between license header and the actual code is not consistent
across files. It looks like majority of the files leaves a single blank line
after license header. So make it consistent except for those exceptional cases
where the actual code starts with a function or class definition.
Change-Id: If6679ab19dc2d7a9df93e076db3019abc1e5b86b<commit_after>
|
# Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
# Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
Trivial: Make vertical white space after license header consistent
Vertical white space between license header and the actual code is not consistent
across files. It looks like majority of the files leaves a single blank line
after license header. So make it consistent except for those exceptional cases
where the actual code starts with a function or class definition.
Change-Id: If6679ab19dc2d7a9df93e076db3019abc1e5b86b# Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
<commit_before># Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
<commit_msg>Trivial: Make vertical white space after license header consistent
Vertical white space between license header and the actual code is not consistent
across files. It looks like majority of the files leaves a single blank line
after license header. So make it consistent except for those exceptional cases
where the actual code starts with a function or class definition.
Change-Id: If6679ab19dc2d7a9df93e076db3019abc1e5b86b<commit_after># Copyright 2012 Intel Inc, OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Fakes For filter and weight tests.
"""
from openstack.common.scheduler import weights
class FakeWeigher1(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeWeigher2(weights.BaseHostWeigher):
def __init__(self):
pass
class FakeClass(object):
def __init__(self):
pass
|
aad92644d01994685d20121def511da2765adfad
|
src/data.py
|
src/data.py
|
import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
|
import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
@property
def neighborhood(self):
return self['NEIGHBORHOOD']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
|
Add neighbrhood property to row
|
Add neighbrhood property to row
|
Python
|
unlicense
|
datascopeanalytics/chicago-new-business,datascopeanalytics/chicago-new-business
|
import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
Add neighbrhood property to row
|
import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
@property
def neighborhood(self):
return self['NEIGHBORHOOD']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
|
<commit_before>import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
<commit_msg>Add neighbrhood property to row<commit_after>
|
import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
@property
def neighborhood(self):
return self['NEIGHBORHOOD']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
|
import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
Add neighbrhood property to rowimport csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
@property
def neighborhood(self):
return self['NEIGHBORHOOD']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
|
<commit_before>import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
<commit_msg>Add neighbrhood property to row<commit_after>import csv
import datetime
class Row(dict):
def __init__(self, *args, **kwargs):
super(Row, self).__init__(*args, **kwargs)
self._start_date = None
self._end_date = None
def _cast_date(self, s):
if not s:
return None
return datetime.datetime.strptime(s, '%m/%d/%Y').date()
def _get_date_or_cast(self, s, attr):
if getattr(self, attr) is None:
setattr(self, attr, self._cast_date(s))
return getattr(self, attr)
@property
def start_date(self):
return self._get_date_or_cast(
self['DATE ISSUED'],
'_start_date',
)
@property
def end_date(self):
return self._get_date_or_cast(
self['LICENSE TERM EXPIRATION DATE'],
'_end_date',
)
@property
def account_number(self):
return self['ACCOUNT NUMBER']
@property
def neighborhood(self):
return self['NEIGHBORHOOD']
class RawReader(csv.DictReader):
def __iter__(self, *args, **kwargs):
row = self.next()
while row:
yield Row(row)
row = self.next()
class RawWriter(csv.DictWriter):
pass
|
7535bd611b26fa81944058c49e7238bd67a5f577
|
forms_builder/wrapper/forms.py
|
forms_builder/wrapper/forms.py
|
from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)
|
from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
exclude = ('sites', 'redirect_url', 'login_required', 'send_email', 'email_from',
'email_copies', 'email_subject', 'email_message')
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)
|
Exclude unnecessary fields for Enjaz
|
Exclude unnecessary fields for Enjaz
|
Python
|
agpl-3.0
|
enjaz/enjaz,enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz
|
from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)Exclude unnecessary fields for Enjaz
|
from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
exclude = ('sites', 'redirect_url', 'login_required', 'send_email', 'email_from',
'email_copies', 'email_subject', 'email_message')
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)
|
<commit_before>from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)<commit_msg>Exclude unnecessary fields for Enjaz<commit_after>
|
from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
exclude = ('sites', 'redirect_url', 'login_required', 'send_email', 'email_from',
'email_copies', 'email_subject', 'email_message')
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)
|
from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)Exclude unnecessary fields for Enjazfrom django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
exclude = ('sites', 'redirect_url', 'login_required', 'send_email', 'email_from',
'email_copies', 'email_subject', 'email_message')
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)
|
<commit_before>from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)<commit_msg>Exclude unnecessary fields for Enjaz<commit_after>from django import forms
from django.forms.models import inlineformset_factory
from forms_builder.forms.models import Form, Field
class FormToBuildForm(forms.ModelForm):
"""
A form that is used to create or edit an instance of ``forms.models.Form``.
"""
class Meta:
model = Form
exclude = ('sites', 'redirect_url', 'login_required', 'send_email', 'email_from',
'email_copies', 'email_subject', 'email_message')
# A form set to manage adding, modifying, or deleting fields of a form
FieldFormSet = inlineformset_factory(Form, Field, exclude=('slug',), extra=1, can_delete=True)
|
eaec82bb0a4a11f683c34550bdc23b3c6b0c48d2
|
examples/M1/M1_export.py
|
examples/M1/M1_export.py
|
import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExport(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1') # create and export network to NeuroML 2
|
import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExportNeuroML2(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1',
connections=True,
stimulations=True) # create and export network to NeuroML 2
|
Update script to export to nml2 of m1
|
Update script to export to nml2 of m1
|
Python
|
mit
|
Neurosim-lab/netpyne,thekerrlab/netpyne,Neurosim-lab/netpyne
|
import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExport(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1') # create and export network to NeuroML 2Update script to export to nml2 of m1
|
import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExportNeuroML2(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1',
connections=True,
stimulations=True) # create and export network to NeuroML 2
|
<commit_before>import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExport(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1') # create and export network to NeuroML 2<commit_msg>Update script to export to nml2 of m1<commit_after>
|
import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExportNeuroML2(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1',
connections=True,
stimulations=True) # create and export network to NeuroML 2
|
import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExport(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1') # create and export network to NeuroML 2Update script to export to nml2 of m1import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExportNeuroML2(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1',
connections=True,
stimulations=True) # create and export network to NeuroML 2
|
<commit_before>import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExport(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1') # create and export network to NeuroML 2<commit_msg>Update script to export to nml2 of m1<commit_after>import M1 # import parameters file
from netpyne import sim # import netpyne sim module
sim.createAndExportNeuroML2(netParams = M1.netParams,
simConfig = M1.simConfig,
reference = 'M1',
connections=True,
stimulations=True) # create and export network to NeuroML 2
|
565c95ce9a8ff96d177196c6dbf8d8f88cdfa029
|
poyo/exceptions.py
|
poyo/exceptions.py
|
# -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
|
# -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
class IgnoredMatchException(PoyoException):
"""Raised when a match does result in a Python representation such as a
comment or a blank line.
"""
|
Add an error class for string data that is ignored by the parser
|
Add an error class for string data that is ignored by the parser
|
Python
|
mit
|
hackebrot/poyo
|
# -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
Add an error class for string data that is ignored by the parser
|
# -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
class IgnoredMatchException(PoyoException):
"""Raised when a match does result in a Python representation such as a
comment or a blank line.
"""
|
<commit_before># -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
<commit_msg>Add an error class for string data that is ignored by the parser<commit_after>
|
# -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
class IgnoredMatchException(PoyoException):
"""Raised when a match does result in a Python representation such as a
comment or a blank line.
"""
|
# -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
Add an error class for string data that is ignored by the parser# -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
class IgnoredMatchException(PoyoException):
"""Raised when a match does result in a Python representation such as a
comment or a blank line.
"""
|
<commit_before># -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
<commit_msg>Add an error class for string data that is ignored by the parser<commit_after># -*- coding: utf-8 -*-
class PoyoException(Exception):
"""Super class for all of Poyo's exceptions."""
class NoMatchException(PoyoException):
"""Raised when the parser cannot find a pattern that matches the given
string.
"""
class NoParentException(PoyoException):
"""Raised when there is no parser object at the given level.
"""
class NoTypeException(PoyoException):
"""Raised when the parser is unable to determine the actual type for a
given string.
"""
class IgnoredMatchException(PoyoException):
"""Raised when a match does result in a Python representation such as a
comment or a blank line.
"""
|
fb02617b29cab97a70a1a11b0d3b7b62b834aa3b
|
server.py
|
server.py
|
from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
pass
elif type == 'doctor:':
pass
elif type == 'female':
pass
elif type == 'male':
pass
else:
return "No files here\n"
return "Sent files\n"
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
|
from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'doctor:':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'female':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'male':
gzip_address = './zipfiles/doc.tar.gz'
else:
return "No files here\n"
gzip_file = open(gzip_address).read()
return bytearray(gzip_file)
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
|
Structure for sending dummy files
|
Structure for sending dummy files
|
Python
|
mit
|
rotemh/soteria
|
from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
pass
elif type == 'doctor:':
pass
elif type == 'female':
pass
elif type == 'male':
pass
else:
return "No files here\n"
return "Sent files\n"
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
Structure for sending dummy files
|
from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'doctor:':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'female':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'male':
gzip_address = './zipfiles/doc.tar.gz'
else:
return "No files here\n"
gzip_file = open(gzip_address).read()
return bytearray(gzip_file)
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
pass
elif type == 'doctor:':
pass
elif type == 'female':
pass
elif type == 'male':
pass
else:
return "No files here\n"
return "Sent files\n"
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
<commit_msg>Structure for sending dummy files<commit_after>
|
from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'doctor:':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'female':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'male':
gzip_address = './zipfiles/doc.tar.gz'
else:
return "No files here\n"
gzip_file = open(gzip_address).read()
return bytearray(gzip_file)
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
|
from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
pass
elif type == 'doctor:':
pass
elif type == 'female':
pass
elif type == 'male':
pass
else:
return "No files here\n"
return "Sent files\n"
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
Structure for sending dummy filesfrom flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'doctor:':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'female':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'male':
gzip_address = './zipfiles/doc.tar.gz'
else:
return "No files here\n"
gzip_file = open(gzip_address).read()
return bytearray(gzip_file)
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
|
<commit_before>from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
pass
elif type == 'doctor:':
pass
elif type == 'female':
pass
elif type == 'male':
pass
else:
return "No files here\n"
return "Sent files\n"
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
<commit_msg>Structure for sending dummy files<commit_after>from flask import Flask
from flask import request
import flask
import hashlib
import json
import gzip
app = Flask(__name__)
stored_files = {}
@app.route('/profile/<type>', methods=['GET'])
def get_dummy_files(type):
if type == 'lawyer':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'doctor:':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'female':
gzip_address = './zipfiles/doc.tar.gz'
elif type == 'male':
gzip_address = './zipfiles/doc.tar.gz'
else:
return "No files here\n"
gzip_file = open(gzip_address).read()
return bytearray(gzip_file)
@app.route('/<int:id>', methods=['GET'])
def get_file(id):
if id in stored_files:
return stored_files[id]
else:
return "No such file\n"
@app.route('/', methods=['POST'])
def upload_file():
data = json.loads(request.data)
uploaded_file = data['uploaded_file']
salt = data['salt']
id = hashlib.sha256(uploaded_file.encode()).hexdigest()
stored_files[id] = (uploaded_file, salt)
return "File stored\n"
if __name__ == "__main__":
app.run()
|
24fc06d17303868ef4ea057cd001ec6cb49ab18f
|
flask_app.py
|
flask_app.py
|
import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read())
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read().decode('utf-8'))
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
Fix utf-8 problem with åäö and friends.
|
Fix utf-8 problem with åäö and friends.
|
Python
|
bsd-3-clause
|
sknippen/refreeze,sknippen/refreeze,sknippen/refreeze
|
import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read())
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
Fix utf-8 problem with åäö and friends.
|
import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read().decode('utf-8'))
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
<commit_before>import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read())
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
<commit_msg>Fix utf-8 problem with åäö and friends.<commit_after>
|
import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read().decode('utf-8'))
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read())
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
Fix utf-8 problem with åäö and friends.import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read().decode('utf-8'))
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
<commit_before>import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read())
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
<commit_msg>Fix utf-8 problem with åäö and friends.<commit_after>import os
from flask import Flask, render_template
from jinja2 import Template
app = Flask(__name__, template_folder='.', static_url_path='', static_folder='..')
app.config.from_pyfile('settings.py')
BASE = '/%s' % app.config['REPO_NAME']
@app.route('/')
def home():
with open('talk.md', 'r') as f:
template = Template(f.read().decode('utf-8'))
markdown = template.render(base=BASE)
js_file = 'talk.js'
if os.path.isfile(js_file):
with open(js_file, 'r') as f_js:
js = f_js.read()
else:
js = ''
return render_template('slides.html', markdown=markdown, js=js)
if __name__ == '__main__':
BASE = ''
port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port)
|
37d8fadd25ebf06207e046007097b06ecb9f33ac
|
numba/cuda/tests/cudapy/test_alignment.py
|
numba/cuda/tests/cudapy/test_alignment.py
|
import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
|
import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec_dtype)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
|
Use Numpy dtype when creating Numpy array
|
Use Numpy dtype when creating Numpy array
(as opposed to the Numba dtype)
|
Python
|
bsd-2-clause
|
sklam/numba,pombredanne/numba,IntelLabs/numba,GaZ3ll3/numba,stonebig/numba,stuartarchibald/numba,IntelLabs/numba,jriehl/numba,IntelLabs/numba,ssarangi/numba,stonebig/numba,stonebig/numba,seibert/numba,seibert/numba,jriehl/numba,GaZ3ll3/numba,jriehl/numba,sklam/numba,stonebig/numba,gmarkall/numba,numba/numba,gmarkall/numba,stefanseefeld/numba,IntelLabs/numba,pombredanne/numba,cpcloud/numba,pombredanne/numba,seibert/numba,GaZ3ll3/numba,stuartarchibald/numba,stuartarchibald/numba,gdementen/numba,stefanseefeld/numba,stefanseefeld/numba,cpcloud/numba,stuartarchibald/numba,numba/numba,ssarangi/numba,sklam/numba,cpcloud/numba,gmarkall/numba,stuartarchibald/numba,ssarangi/numba,gdementen/numba,cpcloud/numba,ssarangi/numba,ssarangi/numba,pitrou/numba,pitrou/numba,GaZ3ll3/numba,gmarkall/numba,seibert/numba,numba/numba,numba/numba,GaZ3ll3/numba,gdementen/numba,sklam/numba,gdementen/numba,pitrou/numba,pombredanne/numba,IntelLabs/numba,gmarkall/numba,jriehl/numba,numba/numba,jriehl/numba,stonebig/numba,gdementen/numba,pitrou/numba,sklam/numba,pitrou/numba,stefanseefeld/numba,seibert/numba,pombredanne/numba,cpcloud/numba,stefanseefeld/numba
|
import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
Use Numpy dtype when creating Numpy array
(as opposed to the Numba dtype)
|
import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec_dtype)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
<commit_msg>Use Numpy dtype when creating Numpy array
(as opposed to the Numba dtype)<commit_after>
|
import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec_dtype)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
|
import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
Use Numpy dtype when creating Numpy array
(as opposed to the Numba dtype)import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec_dtype)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
<commit_msg>Use Numpy dtype when creating Numpy array
(as opposed to the Numba dtype)<commit_after>import numpy as np
from numba import from_dtype, cuda
from numba import unittest_support as unittest
class TestAlignment(unittest.TestCase):
def test_record_alignment(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')], align=True)
rec = from_dtype(rec_dtype)
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
a_recarray = np.recarray(3, dtype=rec_dtype)
for i in range(a_recarray.size):
a_rec = a_recarray[i]
a_rec.a = 0
a_rec.b = (i + 1) * 123
foo[1, 3](a_recarray)
self.assertTrue(np.all(a_recarray.a == a_recarray.b))
def test_record_alignment_error(self):
rec_dtype = np.dtype([('a', 'int32'), ('b', 'float64')])
rec = from_dtype(rec_dtype)
with self.assertRaises(Exception) as raises:
@cuda.jit((rec[:],))
def foo(a):
i = cuda.grid(1)
a[i].a = a[i].b
self.assertTrue('type float64 is not aligned' in str(raises.exception))
if __name__ == '__main__':
unittest.main()
|
ba408df025136563c0eafe00551f23e44e9c2731
|
__openerp__.py
|
__openerp__.py
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
Change version to 1.0.1 unstable
|
Change version to 1.0.1 unstable
|
Python
|
agpl-3.0
|
xcgd/account_credit_transfer
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
Change version to 1.0.1 unstable
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
<commit_before># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
<commit_msg>Change version to 1.0.1 unstable<commit_after>
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
Change version to 1.0.1 unstable# -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
<commit_before># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
<commit_msg>Change version to 1.0.1 unstable<commit_after># -*- coding: utf-8 -*-
{
"name": "Account Credit Transfer",
"version": "1.0.1",
"author": "XCG Consulting",
"website": "http://www.openerp-experts.com",
"category": 'Accounting',
"description": """Account Voucher Credit Transfer Payment.
You need to set up some things before using it.
A credit transfer config link a bank with a parser
A credit transfer parser link a parser with a template that you can upload
""",
"depends": [
'base',
'account_streamline',
],
"data": [
"security/ir.model.access.csv",
"views/config.xml",
"views/parser.xml",
"views/res.bank.xml",
],
'demo_xml': [],
'test': [],
'installable': True,
'active': False,
'external_dependencies': {
'python': ['genshi']
}
}
|
38a9f75bc87dbfb698b852145b7d62e9913602b4
|
tcldis.py
|
tcldis.py
|
from __future__ import print_function
def _tcldis_init():
import sys
import _tcldis
mod = sys.modules[__name__]
for key, value in _tcldis.__dict__.iteritems():
if not callable(value):
continue
mod.__dict__[key] = value
_tcldis_init()
|
from __future__ import print_function
import _tcldis
printbc = _tcldis.printbc
getbc = _tcldis.getbc
inst_table = _tcldis.inst_table
|
Stop trying to be overly clever
|
Stop trying to be overly clever
|
Python
|
bsd-3-clause
|
tolysz/tcldis,tolysz/tcldis,tolysz/tcldis,tolysz/tcldis
|
from __future__ import print_function
def _tcldis_init():
import sys
import _tcldis
mod = sys.modules[__name__]
for key, value in _tcldis.__dict__.iteritems():
if not callable(value):
continue
mod.__dict__[key] = value
_tcldis_init()
Stop trying to be overly clever
|
from __future__ import print_function
import _tcldis
printbc = _tcldis.printbc
getbc = _tcldis.getbc
inst_table = _tcldis.inst_table
|
<commit_before>from __future__ import print_function
def _tcldis_init():
import sys
import _tcldis
mod = sys.modules[__name__]
for key, value in _tcldis.__dict__.iteritems():
if not callable(value):
continue
mod.__dict__[key] = value
_tcldis_init()
<commit_msg>Stop trying to be overly clever<commit_after>
|
from __future__ import print_function
import _tcldis
printbc = _tcldis.printbc
getbc = _tcldis.getbc
inst_table = _tcldis.inst_table
|
from __future__ import print_function
def _tcldis_init():
import sys
import _tcldis
mod = sys.modules[__name__]
for key, value in _tcldis.__dict__.iteritems():
if not callable(value):
continue
mod.__dict__[key] = value
_tcldis_init()
Stop trying to be overly cleverfrom __future__ import print_function
import _tcldis
printbc = _tcldis.printbc
getbc = _tcldis.getbc
inst_table = _tcldis.inst_table
|
<commit_before>from __future__ import print_function
def _tcldis_init():
import sys
import _tcldis
mod = sys.modules[__name__]
for key, value in _tcldis.__dict__.iteritems():
if not callable(value):
continue
mod.__dict__[key] = value
_tcldis_init()
<commit_msg>Stop trying to be overly clever<commit_after>from __future__ import print_function
import _tcldis
printbc = _tcldis.printbc
getbc = _tcldis.getbc
inst_table = _tcldis.inst_table
|
0c8739457150e4ae6e47ffb42d43a560f607a141
|
tests/run.py
|
tests/run.py
|
from spec import eq_, skip, Spec, raises, ok_
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
|
from spec import eq_, skip, Spec, raises, ok_, trap
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True, hide=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
@trap
def hide_kwarg_allows_hiding_output(self):
run("echo 'foo'", hide=True)
eq_(sys.stdall.getvalue(), "")
|
Add test re: hide kwarg
|
Add test re: hide kwarg
|
Python
|
bsd-2-clause
|
frol/invoke,sophacles/invoke,mkusz/invoke,alex/invoke,mkusz/invoke,frol/invoke,pyinvoke/invoke,kejbaly2/invoke,mattrobenolt/invoke,mattrobenolt/invoke,pfmoore/invoke,tyewang/invoke,kejbaly2/invoke,pfmoore/invoke,singingwolfboy/invoke,pyinvoke/invoke
|
from spec import eq_, skip, Spec, raises, ok_
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
Add test re: hide kwarg
|
from spec import eq_, skip, Spec, raises, ok_, trap
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True, hide=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
@trap
def hide_kwarg_allows_hiding_output(self):
run("echo 'foo'", hide=True)
eq_(sys.stdall.getvalue(), "")
|
<commit_before>from spec import eq_, skip, Spec, raises, ok_
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
<commit_msg>Add test re: hide kwarg<commit_after>
|
from spec import eq_, skip, Spec, raises, ok_, trap
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True, hide=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
@trap
def hide_kwarg_allows_hiding_output(self):
run("echo 'foo'", hide=True)
eq_(sys.stdall.getvalue(), "")
|
from spec import eq_, skip, Spec, raises, ok_
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
Add test re: hide kwargfrom spec import eq_, skip, Spec, raises, ok_, trap
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True, hide=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
@trap
def hide_kwarg_allows_hiding_output(self):
run("echo 'foo'", hide=True)
eq_(sys.stdall.getvalue(), "")
|
<commit_before>from spec import eq_, skip, Spec, raises, ok_
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
<commit_msg>Add test re: hide kwarg<commit_after>from spec import eq_, skip, Spec, raises, ok_, trap
from invoke.run import run
from invoke.exceptions import Failure
class Run(Spec):
"""run()"""
def return_code_in_result(self):
r = run("echo 'foo'")
eq_(r.stdout, "foo\n")
eq_(r.return_code, 0)
eq_(r.exited, 0)
def nonzero_return_code_for_failures(self):
result = run("false", warn=True)
eq_(result.exited, 1)
result = run("goobypls", warn=True)
eq_(result.exited, 127)
@raises(Failure)
def fast_failures(self):
run("false")
def run_acts_as_success_boolean(self):
ok_(not run("false", warn=True))
ok_(run("true"))
def non_one_return_codes_still_act_as_False(self):
ok_(not run("goobypls", warn=True, hide=True))
def warn_kwarg_allows_continuing_past_failures(self):
eq_(run("false", warn=True).exited, 1)
@trap
def hide_kwarg_allows_hiding_output(self):
run("echo 'foo'", hide=True)
eq_(sys.stdall.getvalue(), "")
|
7fe3776a59de7a133c5e396cb43d9b4bcc476f7d
|
protocols/views.py
|
protocols/views.py
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request else None
form = ProtocolForm(data)
#import ipdb; ipdb.set_trace()
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request.POST else None
form = ProtocolForm(data)
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
|
Fix the check if form is submitted
|
Fix the check if form is submitted
|
Python
|
mit
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request else None
form = ProtocolForm(data)
#import ipdb; ipdb.set_trace()
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
Fix the check if form is submitted
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request.POST else None
form = ProtocolForm(data)
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
|
<commit_before>from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request else None
form = ProtocolForm(data)
#import ipdb; ipdb.set_trace()
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
<commit_msg>Fix the check if form is submitted<commit_after>
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request.POST else None
form = ProtocolForm(data)
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
|
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request else None
form = ProtocolForm(data)
#import ipdb; ipdb.set_trace()
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
Fix the check if form is submittedfrom django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request.POST else None
form = ProtocolForm(data)
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
|
<commit_before>from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request else None
form = ProtocolForm(data)
#import ipdb; ipdb.set_trace()
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
<commit_msg>Fix the check if form is submitted<commit_after>from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import render
from members.models import User
from .models import Protocol, Topic
from .forms import ProtocolForm, TopicForm, InstitutionForm
@login_required
def add(request):
data = request.POST if request.POST else None
form = ProtocolForm(data)
if form.is_valid():
form.save()
return render(request, 'protocols/add.html', locals())
|
7d1a903845db60186318575db11a712cd62d884d
|
win-installer/gaphor-script.py
|
win-installer/gaphor-script.py
|
if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.actionmanager import ActionManager
from gaphor.plugins.alignment import Alignment
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.services.diagramexportmanager import DiagramExportManager
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.filemanager import FileManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.ui.mainwindow import Namespace
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.ui.mainwindow import Toolbox
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
|
if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
|
Fix mypy import errors due to removed services
|
Fix mypy import errors due to removed services
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.actionmanager import ActionManager
from gaphor.plugins.alignment import Alignment
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.services.diagramexportmanager import DiagramExportManager
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.filemanager import FileManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.ui.mainwindow import Namespace
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.ui.mainwindow import Toolbox
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
Fix mypy import errors due to removed services
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>
|
if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
|
<commit_before>if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.actionmanager import ActionManager
from gaphor.plugins.alignment import Alignment
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.services.diagramexportmanager import DiagramExportManager
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.filemanager import FileManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.ui.mainwindow import Namespace
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.ui.mainwindow import Toolbox
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
<commit_msg>Fix mypy import errors due to removed services
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after>
|
if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
|
if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.actionmanager import ActionManager
from gaphor.plugins.alignment import Alignment
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.services.diagramexportmanager import DiagramExportManager
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.filemanager import FileManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.ui.mainwindow import Namespace
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.ui.mainwindow import Toolbox
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
Fix mypy import errors due to removed services
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me>if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
|
<commit_before>if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.actionmanager import ActionManager
from gaphor.plugins.alignment import Alignment
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.services.diagramexportmanager import DiagramExportManager
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.filemanager import FileManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.ui.mainwindow import Namespace
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.ui.mainwindow import Toolbox
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
<commit_msg>Fix mypy import errors due to removed services
Signed-off-by: Dan Yeaw <2591e5f46f28d303f9dc027d475a5c60d8dea17a@yeaw.me><commit_after>if __name__ == "__main__":
import gaphor
from gaphor import core
from gaphor.services.componentregistry import ComponentRegistry
from gaphor.ui.consolewindow import ConsoleWindow
from gaphor.services.copyservice import CopyService
from gaphor.plugins.diagramlayout import DiagramLayout
from gaphor.ui.mainwindow import Diagrams
from gaphor.UML.elementfactory import ElementFactory
from gaphor.ui.elementeditor import ElementEditor
from gaphor.services.eventmanager import EventManager
from gaphor.services.helpservice import HelpService
from gaphor.ui.mainwindow import MainWindow
from gaphor.services.properties import Properties
from gaphor.plugins.pynsource import PyNSource
from gaphor.services.sanitizerservice import SanitizerService
from gaphor.services.undomanager import UndoManager
from gaphor.plugins.xmiexport import XMIExport
gaphor.main()
|
5aca39cef15ea4381b30127b8ded31ec37ffd273
|
script/notification/ifttt.py
|
script/notification/ifttt.py
|
from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {'value1':self.__packpub_info['title'].encode('utf-8'), 'value2':self.__packpub_info['description'].encode('utf-8')})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
|
from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {
'value1':self.__packpub_info['title'].encode('utf-8'),
'value2':self.__packpub_info['description'].encode('utf-8'),
'value3':self.__packpub_info['url_image']
})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
|
Add image to ifff request
|
Add image to ifff request
|
Python
|
mit
|
niqdev/packtpub-crawler,niqdev/packtpub-crawler,niqdev/packtpub-crawler
|
from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {'value1':self.__packpub_info['title'].encode('utf-8'), 'value2':self.__packpub_info['description'].encode('utf-8')})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
Add image to ifff request
|
from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {
'value1':self.__packpub_info['title'].encode('utf-8'),
'value2':self.__packpub_info['description'].encode('utf-8'),
'value3':self.__packpub_info['url_image']
})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
|
<commit_before>from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {'value1':self.__packpub_info['title'].encode('utf-8'), 'value2':self.__packpub_info['description'].encode('utf-8')})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
<commit_msg>Add image to ifff request<commit_after>
|
from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {
'value1':self.__packpub_info['title'].encode('utf-8'),
'value2':self.__packpub_info['description'].encode('utf-8'),
'value3':self.__packpub_info['url_image']
})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
|
from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {'value1':self.__packpub_info['title'].encode('utf-8'), 'value2':self.__packpub_info['description'].encode('utf-8')})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
Add image to ifff requestfrom logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {
'value1':self.__packpub_info['title'].encode('utf-8'),
'value2':self.__packpub_info['description'].encode('utf-8'),
'value3':self.__packpub_info['url_image']
})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
|
<commit_before>from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {'value1':self.__packpub_info['title'].encode('utf-8'), 'value2':self.__packpub_info['description'].encode('utf-8')})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
<commit_msg>Add image to ifff request<commit_after>from logs import *
import requests
class Ifttt(object):
"""
"""
def __init__(self, config, packpub_info, upload_info):
self.__packpub_info = packpub_info
self.__url = "https://maker.ifttt.com/trigger/{eventName}/with/key/{apiKey}".format(
eventName=config.get('ifttt', 'ifttt.event_name'),
apiKey=config.get('ifttt', 'ifttt.key')
)
def send(self):
r = requests.post(self.__url, data = {
'value1':self.__packpub_info['title'].encode('utf-8'),
'value2':self.__packpub_info['description'].encode('utf-8'),
'value3':self.__packpub_info['url_image']
})
log_success('[+] notification sent to IFTTT')
def sendError(self, exception, source):
title = "packtpub-crawler [{source}]: Could not download ebook".format(source=source)
r = requests.post(self.__url, data = {'value1':title, 'value2':repr(exception)})
log_success('[+] error notification sent to IFTTT')
|
392aeb99891ff9949c9e9e205743937d8e9cb632
|
bot/api/telegram.py
|
bot/api/telegram.py
|
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = requests.get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
|
import threading
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
self.local = threading.local()
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = self.__get_session().get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __get_session(self):
session = self.local.__dict__.get("session")
if not session:
session = requests.session()
self.local.session = session
return session
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
|
Use threading.local() to store a requests.Session object per-thread and use it to perform the requests, allowing connections to be reused, speeding bot replies a lot
|
Use threading.local() to store a requests.Session object per-thread and use it to perform the requests, allowing connections to be reused, speeding bot replies a lot
|
Python
|
agpl-3.0
|
alvarogzp/telegram-bot,alvarogzp/telegram-bot
|
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = requests.get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
Use threading.local() to store a requests.Session object per-thread and use it to perform the requests, allowing connections to be reused, speeding bot replies a lot
|
import threading
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
self.local = threading.local()
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = self.__get_session().get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __get_session(self):
session = self.local.__dict__.get("session")
if not session:
session = requests.session()
self.local.session = session
return session
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
|
<commit_before>import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = requests.get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
<commit_msg>Use threading.local() to store a requests.Session object per-thread and use it to perform the requests, allowing connections to be reused, speeding bot replies a lot<commit_after>
|
import threading
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
self.local = threading.local()
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = self.__get_session().get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __get_session(self):
session = self.local.__dict__.get("session")
if not session:
session = requests.session()
self.local.session = session
return session
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
|
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = requests.get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
Use threading.local() to store a requests.Session object per-thread and use it to perform the requests, allowing connections to be reused, speeding bot replies a lotimport threading
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
self.local = threading.local()
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = self.__get_session().get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __get_session(self):
session = self.local.__dict__.get("session")
if not session:
session = requests.session()
self.local.session = session
return session
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
|
<commit_before>import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = requests.get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
<commit_msg>Use threading.local() to store a requests.Session object per-thread and use it to perform the requests, allowing connections to be reused, speeding bot replies a lot<commit_after>import threading
import requests
class TelegramBotApi:
"""This is a threading-safe API. Avoid breaking it by adding state."""
def __init__(self, auth_token, debug: bool):
self.base_url = "https://api.telegram.org/bot" + auth_token + "/"
self.debug = debug
self.local = threading.local()
def __getattr__(self, item):
return self.__get_request_from_function_name(item)
def __get_request_from_function_name(self, function_name):
return lambda **params: self.__send_request(function_name, params)
def __send_request(self, command, params):
request = self.__get_session().get(self.base_url + command, params=params, timeout=60)
self.__log_request(request)
response = request.json()
self.__log_response(response)
if not response["ok"]:
raise TelegramBotApiException(response["description"])
return response["result"]
def __get_session(self):
session = self.local.__dict__.get("session")
if not session:
session = requests.session()
self.local.session = session
return session
def __log_request(self, request):
if self.debug:
print(">> " + request.url)
def __log_response(self, response):
if self.debug:
print("<< " + str(response))
class TelegramBotApiException(Exception):
pass
|
91141713b672f56a8c45f0250b7e9216a69237f8
|
features/support/splinter_client.py
|
features/support/splinter_client.py
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs')
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
Increase splinter wait time to 15 seconds
|
Increase splinter wait time to 15 seconds
@gtrogers
@maxfliri
|
Python
|
mit
|
alphagov/backdrop,alphagov/backdrop,alphagov/backdrop
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs')
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
Increase splinter wait time to 15 seconds
@gtrogers
@maxfliri
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
<commit_before>import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs')
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
<commit_msg>Increase splinter wait time to 15 seconds
@gtrogers
@maxfliri<commit_after>
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs')
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
Increase splinter wait time to 15 seconds
@gtrogers
@maxfliriimport logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
<commit_before>import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs')
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
<commit_msg>Increase splinter wait time to 15 seconds
@gtrogers
@maxfliri<commit_after>import logging
from pymongo import MongoClient
from splinter import Browser
from features.support.http_test_client import HTTPTestClient
from features.support.support import Api
class SplinterClient(object):
def __init__(self, database_name):
self.database_name = database_name
self._write_api = Api.start('write', '5001')
def storage(self):
return MongoClient('localhost', 27017)[self.database_name]
def before_scenario(self):
self.browser = Browser('phantomjs', wait_time=15)
def after_scenario(self):
self.browser.quit()
def spin_down(self):
self._write_api.stop()
def get(self, url, headers=None):
self.browser.visit(self._write_api.url(url))
return SplinterResponse(self.browser)
class SplinterResponse:
def __init__(self, browser):
self.status_code = browser.status_code
self.data = None
self.headers = None
|
9516115f722fb3f95882553d8077bf1ab4a670ef
|
examples/web_demo/exifutil.py
|
examples/web_demo/exifutil.py
|
"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
return np.asarray(im).astype(np.float32) / 255.
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
|
"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
img = np.asarray(im).astype(np.float32) / 255.
if img.ndim == 2:
img = img[:, :, np.newaxis]
img = np.tile(img, (1, 1, 3))
elif img.shape[2] == 4:
img = img[:, :, :3]
return img
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
|
FIX web_demo upload was not processing grayscale correctly
|
FIX web_demo upload was not processing grayscale correctly
|
Python
|
bsd-2-clause
|
wangg12/caffe,longjon/caffe,gnina/gnina,gogartom/caffe-textmaps,wangg12/caffe,tackgeun/caffe,longjon/caffe,wangg12/caffe,tackgeun/caffe,tackgeun/caffe,gogartom/caffe-textmaps,gnina/gnina,tackgeun/caffe,gnina/gnina,CZCV/s-dilation-caffe,gnina/gnina,wangg12/caffe,longjon/caffe,gnina/gnina,gogartom/caffe-textmaps,CZCV/s-dilation-caffe,longjon/caffe,CZCV/s-dilation-caffe,CZCV/s-dilation-caffe,gnina/gnina,gogartom/caffe-textmaps
|
"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
return np.asarray(im).astype(np.float32) / 255.
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
FIX web_demo upload was not processing grayscale correctly
|
"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
img = np.asarray(im).astype(np.float32) / 255.
if img.ndim == 2:
img = img[:, :, np.newaxis]
img = np.tile(img, (1, 1, 3))
elif img.shape[2] == 4:
img = img[:, :, :3]
return img
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
|
<commit_before>"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
return np.asarray(im).astype(np.float32) / 255.
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
<commit_msg>FIX web_demo upload was not processing grayscale correctly<commit_after>
|
"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
img = np.asarray(im).astype(np.float32) / 255.
if img.ndim == 2:
img = img[:, :, np.newaxis]
img = np.tile(img, (1, 1, 3))
elif img.shape[2] == 4:
img = img[:, :, :3]
return img
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
|
"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
return np.asarray(im).astype(np.float32) / 255.
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
FIX web_demo upload was not processing grayscale correctly"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
img = np.asarray(im).astype(np.float32) / 255.
if img.ndim == 2:
img = img[:, :, np.newaxis]
img = np.tile(img, (1, 1, 3))
elif img.shape[2] == 4:
img = img[:, :, :3]
return img
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
|
<commit_before>"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
return np.asarray(im).astype(np.float32) / 255.
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
<commit_msg>FIX web_demo upload was not processing grayscale correctly<commit_after>"""
This script handles the skimage exif problem.
"""
from PIL import Image
import numpy as np
ORIENTATIONS = { # used in apply_orientation
2: (Image.FLIP_LEFT_RIGHT,),
3: (Image.ROTATE_180,),
4: (Image.FLIP_TOP_BOTTOM,),
5: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_90),
6: (Image.ROTATE_270,),
7: (Image.FLIP_LEFT_RIGHT, Image.ROTATE_270),
8: (Image.ROTATE_90,)
}
def open_oriented_im(im_path):
im = Image.open(im_path)
if hasattr(im, '_getexif'):
exif = im._getexif()
if exif is not None and 274 in exif:
orientation = exif[274]
im = apply_orientation(im, orientation)
img = np.asarray(im).astype(np.float32) / 255.
if img.ndim == 2:
img = img[:, :, np.newaxis]
img = np.tile(img, (1, 1, 3))
elif img.shape[2] == 4:
img = img[:, :, :3]
return img
def apply_orientation(im, orientation):
if orientation in ORIENTATIONS:
for method in ORIENTATIONS[orientation]:
im = im.transpose(method)
return im
|
cb4c0cb2c35d97e0364a4c010715cdf15d261e4c
|
basehandler.py
|
basehandler.py
|
# -*- coding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
    """Base handler shared by the app's views.

    Wraps Jinja2 template rendering and the Set-Cookie plumbing for the
    user session cookie.
    """

    def render(self, template, **kw):
        """Render *template* with the given keyword context and write the
        result to the response."""
        self.response.write(JINJA_ENV.get_template(template).render(kw))

    def set_cookie(self, user):
        """Attach the user cookie (built by utils.make_cookie) to the
        response headers."""
        header_value = 'user={}; Path=/'.format(utils.make_cookie(user))
        self.response.headers.add_header('Set-Cookie', header_value)

    def logout(self):
        """Clear the user cookie by overwriting it with an empty value."""
        self.response.headers.add_header('Set-Cookie', 'user=;Path=/')
|
# -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
    # Shared base for request handlers: Jinja2 template rendering plus
    # user-cookie session helpers backed by the utils module.
    def render(self, template, **kw):
        """Method render takes a template file and key-value pairs.
        It substitutes keys found in template with values in pairs.
        The resulted page is sent back to user."""
        t = JINJA_ENV.get_template(template)
        self.response.write(t.render(kw))
    def set_cookie(self, user):
        """Set user cookie in headers."""
        # Cookie value comes from utils.make_cookie; get_username below
        # splits it on '|', so the format appears to be
        # 'username|signature' — confirm against utils.make_cookie.
        cookie = utils.make_cookie(user)
        self.response.headers.add_header(
            'Set-Cookie',
            'user={}; Path=/'.format(cookie))
    def logout(self):
        """Set user cookie to empty in headers."""
        self.response.headers.add_header('Set-Cookie',
                                         'user=;Path=/')
    def get_username(self):
        """Check if user has a valid cookie.
        Returns username if cookie is valid."""
        # Implicitly returns None when the cookie is missing or fails
        # utils.valid_cookie.
        cookie = self.request.cookies.get('user')
        if cookie and utils.valid_cookie(cookie):
            username = cookie.split('|')[0]
            return username
|
Add a method to get username if users have valid cookie
|
Add a method to get username if users have valid cookie
|
Python
|
mit
|
lttviet/udacity-final
|
# -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
def render(self, template, **kw):
"""Method render takes a template file and key-value pairs.
It substitutes keys found in template with values in pairs.
The resulted page is sent back to user."""
t = JINJA_ENV.get_template(template)
self.response.write(t.render(kw))
def set_cookie(self, user):
"""Set user cookie in headers."""
cookie = utils.make_cookie(user)
self.response.headers.add_header(
'Set-Cookie',
'user={}; Path=/'.format(cookie))
def logout(self):
"""Set user cookie to empty in headers."""
self.response.headers.add_header('Set-Cookie',
'user=;Path=/')
Add a method to get username if users have valid cookie
|
# -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
def render(self, template, **kw):
"""Method render takes a template file and key-value pairs.
It substitutes keys found in template with values in pairs.
The resulted page is sent back to user."""
t = JINJA_ENV.get_template(template)
self.response.write(t.render(kw))
def set_cookie(self, user):
"""Set user cookie in headers."""
cookie = utils.make_cookie(user)
self.response.headers.add_header(
'Set-Cookie',
'user={}; Path=/'.format(cookie))
def logout(self):
"""Set user cookie to empty in headers."""
self.response.headers.add_header('Set-Cookie',
'user=;Path=/')
def get_username(self):
"""Check if user has a valid cookie.
Returns username if cookie is valid."""
cookie = self.request.cookies.get('user')
if cookie and utils.valid_cookie(cookie):
username = cookie.split('|')[0]
return username
|
<commit_before># -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
def render(self, template, **kw):
"""Method render takes a template file and key-value pairs.
It substitutes keys found in template with values in pairs.
The resulted page is sent back to user."""
t = JINJA_ENV.get_template(template)
self.response.write(t.render(kw))
def set_cookie(self, user):
"""Set user cookie in headers."""
cookie = utils.make_cookie(user)
self.response.headers.add_header(
'Set-Cookie',
'user={}; Path=/'.format(cookie))
def logout(self):
"""Set user cookie to empty in headers."""
self.response.headers.add_header('Set-Cookie',
'user=;Path=/')
<commit_msg>Add a method to get username if users have valid cookie<commit_after>
|
# -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
def render(self, template, **kw):
"""Method render takes a template file and key-value pairs.
It substitutes keys found in template with values in pairs.
The resulted page is sent back to user."""
t = JINJA_ENV.get_template(template)
self.response.write(t.render(kw))
def set_cookie(self, user):
"""Set user cookie in headers."""
cookie = utils.make_cookie(user)
self.response.headers.add_header(
'Set-Cookie',
'user={}; Path=/'.format(cookie))
def logout(self):
"""Set user cookie to empty in headers."""
self.response.headers.add_header('Set-Cookie',
'user=;Path=/')
def get_username(self):
"""Check if user has a valid cookie.
Returns username if cookie is valid."""
cookie = self.request.cookies.get('user')
if cookie and utils.valid_cookie(cookie):
username = cookie.split('|')[0]
return username
|
# -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
def render(self, template, **kw):
"""Method render takes a template file and key-value pairs.
It substitutes keys found in template with values in pairs.
The resulted page is sent back to user."""
t = JINJA_ENV.get_template(template)
self.response.write(t.render(kw))
def set_cookie(self, user):
"""Set user cookie in headers."""
cookie = utils.make_cookie(user)
self.response.headers.add_header(
'Set-Cookie',
'user={}; Path=/'.format(cookie))
def logout(self):
"""Set user cookie to empty in headers."""
self.response.headers.add_header('Set-Cookie',
'user=;Path=/')
Add a method to get username if users have valid cookie# -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
def render(self, template, **kw):
"""Method render takes a template file and key-value pairs.
It substitutes keys found in template with values in pairs.
The resulted page is sent back to user."""
t = JINJA_ENV.get_template(template)
self.response.write(t.render(kw))
def set_cookie(self, user):
"""Set user cookie in headers."""
cookie = utils.make_cookie(user)
self.response.headers.add_header(
'Set-Cookie',
'user={}; Path=/'.format(cookie))
def logout(self):
"""Set user cookie to empty in headers."""
self.response.headers.add_header('Set-Cookie',
'user=;Path=/')
def get_username(self):
"""Check if user has a valid cookie.
Returns username if cookie is valid."""
cookie = self.request.cookies.get('user')
if cookie and utils.valid_cookie(cookie):
username = cookie.split('|')[0]
return username
|
<commit_before># -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
def render(self, template, **kw):
"""Method render takes a template file and key-value pairs.
It substitutes keys found in template with values in pairs.
The resulted page is sent back to user."""
t = JINJA_ENV.get_template(template)
self.response.write(t.render(kw))
def set_cookie(self, user):
"""Set user cookie in headers."""
cookie = utils.make_cookie(user)
self.response.headers.add_header(
'Set-Cookie',
'user={}; Path=/'.format(cookie))
def logout(self):
"""Set user cookie to empty in headers."""
self.response.headers.add_header('Set-Cookie',
'user=;Path=/')
<commit_msg>Add a method to get username if users have valid cookie<commit_after># -*- conding: utf-8 -*-
import os
import jinja2
import webapp2
import utils
JINJA_ENV = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
autoescape=True)
class BaseHandler(webapp2.RequestHandler):
def render(self, template, **kw):
"""Method render takes a template file and key-value pairs.
It substitutes keys found in template with values in pairs.
The resulted page is sent back to user."""
t = JINJA_ENV.get_template(template)
self.response.write(t.render(kw))
def set_cookie(self, user):
"""Set user cookie in headers."""
cookie = utils.make_cookie(user)
self.response.headers.add_header(
'Set-Cookie',
'user={}; Path=/'.format(cookie))
def logout(self):
"""Set user cookie to empty in headers."""
self.response.headers.add_header('Set-Cookie',
'user=;Path=/')
def get_username(self):
"""Check if user has a valid cookie.
Returns username if cookie is valid."""
cookie = self.request.cookies.get('user')
if cookie and utils.valid_cookie(cookie):
username = cookie.split('|')[0]
return username
|
d33d7e5bf29d8c135c68eb5f1206d2f7df6f42ed
|
froide/foirequest/search_indexes.py
|
froide/foirequest/search_indexes.py
|
from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
|
from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
    """Haystack search index definition for FoiRequest objects."""
    # Primary document field; EdgeNgram enables prefix (autocomplete-style)
    # matching. Content is rendered from the index template.
    text = indexes.EdgeNgramField(document=True, use_template=True)
    title = indexes.CharField(model_attr='title')
    description = indexes.CharField(model_attr='description')
    status = indexes.CharField(model_attr='status')
    first_message = indexes.DateTimeField(model_attr='first_message')
    last_message = indexes.DateTimeField(model_attr='last_message')
    # Absolute URL of the request, taken from the model's
    # get_absolute_url and stored alongside the indexed document.
    url = indexes.CharField(model_attr='get_absolute_url')
    def get_queryset(self):
        """Used when the entire index for model is updated."""
        return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
|
Add URL and description to search index of FoiRequest
|
Add URL and description to search index of FoiRequest
|
Python
|
mit
|
catcosmo/froide,fin/froide,catcosmo/froide,fin/froide,ryankanno/froide,stefanw/froide,catcosmo/froide,okfse/froide,ryankanno/froide,ryankanno/froide,CodeforHawaii/froide,LilithWittmann/froide,okfse/froide,CodeforHawaii/froide,ryankanno/froide,stefanw/froide,LilithWittmann/froide,okfse/froide,fin/froide,okfse/froide,CodeforHawaii/froide,stefanw/froide,catcosmo/froide,CodeforHawaii/froide,LilithWittmann/froide,okfse/froide,LilithWittmann/froide,ryankanno/froide,stefanw/froide,catcosmo/froide,fin/froide,LilithWittmann/froide,stefanw/froide,CodeforHawaii/froide
|
from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
Add URL and description to search index of FoiRequest
|
from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
description = indexes.CharField(model_attr='description')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
url = indexes.CharField(model_attr='get_absolute_url')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
|
<commit_before>from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
<commit_msg>Add URL and description to search index of FoiRequest<commit_after>
|
from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
description = indexes.CharField(model_attr='description')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
url = indexes.CharField(model_attr='get_absolute_url')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
|
from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
Add URL and description to search index of FoiRequestfrom haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
description = indexes.CharField(model_attr='description')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
url = indexes.CharField(model_attr='get_absolute_url')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
|
<commit_before>from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
<commit_msg>Add URL and description to search index of FoiRequest<commit_after>from haystack import indexes
from haystack import site
from foirequest.models import FoiRequest
class FoiRequestIndex(indexes.SearchIndex):
text = indexes.EdgeNgramField(document=True, use_template=True)
title = indexes.CharField(model_attr='title')
description = indexes.CharField(model_attr='description')
status = indexes.CharField(model_attr='status')
first_message = indexes.DateTimeField(model_attr='first_message')
last_message = indexes.DateTimeField(model_attr='last_message')
url = indexes.CharField(model_attr='get_absolute_url')
def get_queryset(self):
"""Used when the entire index for model is updated."""
return FoiRequest.objects.get_for_search_index()
site.register(FoiRequest, FoiRequestIndex)
|
4c646128cfcb6d59445890c257447f01ed77a706
|
core/management/update_email_forwards.py
|
core/management/update_email_forwards.py
|
# Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias.email+'@arenbergorkest.be', ' '.join(alias.destinations))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
|
# Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias['email']+'@arenbergorkest.be', ' '.join(alias['destinations']))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
|
Fix python syntax bug in update email fwd's script
|
Fix python syntax bug in update email fwd's script
|
Python
|
mit
|
tfiers/arenberg-online,tfiers/arenberg-online,tfiers/arenberg-online
|
# Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias.email+'@arenbergorkest.be', ' '.join(alias.destinations))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
Fix python syntax bug in update email fwd's script
|
# Rewrite the postfix virtual alias table from the alias definitions in
# email_aliases, then rebuild the postfix lookup map.
#
# Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergorkest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from subprocess import call

from email_aliases import aliases

VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'

# Build the new file content in one pass; ''.join avoids the quadratic
# behavior of repeated string concatenation in a loop.
c = ''.join(
    '{} {}\n'.format(alias['email'] + '@arenbergorkest.be',
                     ' '.join(alias['destinations']))
    for alias in aliases
)

with open(VIRTUAL_ALIAS_FILE, 'w') as f:
    f.write(c)
# Regenerate the postfix hash table from the updated alias file.
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
|
<commit_before># Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias.email+'@arenbergorkest.be', ' '.join(alias.destinations))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
<commit_msg>Fix python syntax bug in update email fwd's script<commit_after>
|
# Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias['email']+'@arenbergorkest.be', ' '.join(alias['destinations']))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
|
# Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias.email+'@arenbergorkest.be', ' '.join(alias.destinations))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
Fix python syntax bug in update email fwd's script# Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias['email']+'@arenbergorkest.be', ' '.join(alias['destinations']))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
|
<commit_before># Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias.email+'@arenbergorkest.be', ' '.join(alias.destinations))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
<commit_msg>Fix python syntax bug in update email fwd's script<commit_after># Virtual alias file syntax:
# email, space, email, (space, email, space, email,) newline, (repeat)
# Example:
# groep@arenbergorkest.be jef@gmail.com jos@hotmail.com
# jef@arenbergokest.be jef@gmail.com
# Catchall alias email = '@arenbergorkest.be'
from email_aliases import aliases
c = '' # New content of postfix virtual aliases file
for alias in aliases:
c += '{} {}\n'.format(alias['email']+'@arenbergorkest.be', ' '.join(alias['destinations']))
from subprocess import call
VIRTUAL_ALIAS_FILE = '/etc/postfix/virtual'
with open(VIRTUAL_ALIAS_FILE, 'w') as f:
f.write(c)
call(['sudo', 'postmap', VIRTUAL_ALIAS_FILE])
|
f7611e37ef1e0dfaa568515be365d50b3edbd11c
|
ccdproc/conftest.py
|
ccdproc/conftest.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
try:
# This is the way to get plugins in astropy 2.x
from astropy.tests.pytest_plugins import *
except ImportError:
# Otherwise they are installed as separate packages that pytest
# automagically finds.
pass
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
|
Fix plugin import for astropy 2.x
|
Fix plugin import for astropy 2.x
|
Python
|
bsd-3-clause
|
astropy/ccdproc,mwcraig/ccdproc
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
Fix plugin import for astropy 2.x
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
    # astropy 3.x location of the display plugin helpers.
    from astropy.tests.plugins.display import (pytest_report_header,
                                               PYTEST_HEADER_MODULES,
                                               TESTED_VERSIONS)
except ImportError:
    # When using astropy 2.0
    from astropy.tests.pytest_plugins import (pytest_report_header,
                                              PYTEST_HEADER_MODULES,
                                              TESTED_VERSIONS)
try:
    # This is the way to get plugins in astropy 2.x
    from astropy.tests.pytest_plugins import *
except ImportError:
    # Otherwise they are installed as separate packages that pytest
    # automagically finds.
    pass
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
    from .version import version
except ImportError:
    # Fallback when running from a source tree without a generated
    # version module.
    version = 'dev'
# Report this package's own version (keyed by its directory name) in the
# pytest header.
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
    PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
    PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
    PYTEST_HEADER_MODULES['reproject'] = 'reproject'
    del PYTEST_HEADER_MODULES['h5py']
except KeyError:
    # h5py may already be absent from the header module list.
    pass
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
<commit_msg>Fix plugin import for astropy 2.x<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
try:
# This is the way to get plugins in astropy 2.x
from astropy.tests.pytest_plugins import *
except ImportError:
# Otherwise they are installed as separate packages that pytest
# automagically finds.
pass
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
Fix plugin import for astropy 2.x# Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
try:
# This is the way to get plugins in astropy 2.x
from astropy.tests.pytest_plugins import *
except ImportError:
# Otherwise they are installed as separate packages that pytest
# automagically finds.
pass
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
|
<commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
<commit_msg>Fix plugin import for astropy 2.x<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
# this contains imports plugins that configure py.test for astropy tests.
# by importing them here in conftest.py they are discoverable by py.test
# no matter how it is invoked within the source tree.
import os
try:
from astropy.tests.plugins.display import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
except ImportError:
# When using astropy 2.0
from astropy.tests.pytest_plugins import (pytest_report_header,
PYTEST_HEADER_MODULES,
TESTED_VERSIONS)
try:
# This is the way to get plugins in astropy 2.x
from astropy.tests.pytest_plugins import *
except ImportError:
# Otherwise they are installed as separate packages that pytest
# automagically finds.
pass
from .tests.pytest_fixtures import *
# This is to figure out ccdproc version, rather than using Astropy's
try:
from .version import version
except ImportError:
version = 'dev'
packagename = os.path.basename(os.path.dirname(__file__))
TESTED_VERSIONS[packagename] = version
# Uncomment the following line to treat all DeprecationWarnings as
# exceptions
# enable_deprecations_as_exceptions()
# Add astropy to test header information and remove unused packages.
try:
PYTEST_HEADER_MODULES['Astropy'] = 'astropy'
PYTEST_HEADER_MODULES['astroscrappy'] = 'astroscrappy'
PYTEST_HEADER_MODULES['reproject'] = 'reproject'
del PYTEST_HEADER_MODULES['h5py']
except KeyError:
pass
|
08aa5214a1b1a5fc6872de76b12cf97f5ceb03c9
|
pymatgen/ext/tests/test_jhu.py
|
pymatgen/ext/tests/test_jhu.py
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(not website_is_up, "http://muellergroup.jhu.edu:8080 is down.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(True, "This code is way too buggy to be tested.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
|
Disable JHU kpoint generationt test.
|
Disable JHU kpoint generationt test.
|
Python
|
mit
|
gmatteo/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,davidwaroquiers/pymatgen,gmatteo/pymatgen,davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,gVallverdu/pymatgen,vorwerkc/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen,fraricci/pymatgen
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(not website_is_up, "http://muellergroup.jhu.edu:8080 is down.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
Disable JHU kpoint generationt test.
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(True, "This code is way too buggy to be tested.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
|
<commit_before># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(not website_is_up, "http://muellergroup.jhu.edu:8080 is down.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
<commit_msg>Disable JHU kpoint generationt test.<commit_after>
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(True, "This code is way too buggy to be tested.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
|
# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(not website_is_up, "http://muellergroup.jhu.edu:8080 is down.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
Disable JHU kpoint generationt test.# coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(True, "This code is way too buggy to be tested.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
|
<commit_before># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(not website_is_up, "http://muellergroup.jhu.edu:8080 is down.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
<commit_msg>Disable JHU kpoint generationt test.<commit_after># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
import unittest
import requests
from pymatgen.ext.jhu import get_kpoints
from pymatgen.io.vasp.inputs import Incar
from pymatgen.io.vasp.sets import MPRelaxSet
from pymatgen.util.testing import PymatgenTest
__author__ = "Joseph Montoya"
__copyright__ = "Copyright 2017, The Materials Project"
__maintainer__ = "Joseph Montoya"
__email__ = "montoyjh@lbl.gov"
__date__ = "June 22, 2017"
website_is_up = requests.get("http://muellergroup.jhu.edu:8080").status_code == 200
@unittest.skipIf(True, "This code is way too buggy to be tested.")
class JhuTest(PymatgenTest):
_multiprocess_shared_ = True
def test_get_kpoints(self):
si = PymatgenTest.get_structure("Si")
input_set = MPRelaxSet(si)
kpoints = get_kpoints(si, incar=input_set.incar)
if __name__ == "__main__":
unittest.main()
|
89d9787fc5aa595f6d93d49565313212c2f95b6b
|
helper_servers/flask_upload.py
|
helper_servers/flask_upload.py
|
from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
|
from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Upload</title>
<form enctype="multipart/form-data" action="/ul" method="POST">
<input type="file" id="file" name="file">
<input type="submit">
</form>
'''
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000)
|
Add simple form to flask upload server
|
Add simple form to flask upload server
|
Python
|
bsd-3-clause
|
stephenbradshaw/pentesting_stuff,stephenbradshaw/pentesting_stuff,stephenbradshaw/pentesting_stuff,stephenbradshaw/pentesting_stuff
|
from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
Add simple form to flask upload server
|
from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Upload</title>
<form enctype="multipart/form-data" action="/ul" method="POST">
<input type="file" id="file" name="file">
<input type="submit">
</form>
'''
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000)
|
<commit_before>from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
<commit_msg>Add simple form to flask upload server<commit_after>
|
from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Upload</title>
<form enctype="multipart/form-data" action="/ul" method="POST">
<input type="file" id="file" name="file">
<input type="submit">
</form>
'''
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000)
|
from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
Add simple form to flask upload serverfrom flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Upload</title>
<form enctype="multipart/form-data" action="/ul" method="POST">
<input type="file" id="file" name="file">
<input type="submit">
</form>
'''
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000)
|
<commit_before>from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
<commit_msg>Add simple form to flask upload server<commit_after>from flask import Flask, render_template, request, redirect, url_for
from werkzeug.utils import secure_filename
import datetime
import os
def timestamp():
return datetime.datetime.now().strftime('%Y%m%d%H%M%S')
app = Flask(__name__)
app.config['UPLOAD_FOLDER'] = '/uploads'
@app.route('/', methods=['GET'])
def index():
return '''
<!doctype html>
<title>Hi</title>
Hi
'''
#curl -F file=@"/tmp/test.txt" https://[site]/[app_path]/ul
@app.route('/ul', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
if 'file' not in request.files:
return redirect(request.url)
file = request.files['file']
if file.filename == '':
return redirect(request.url)
if file:
filename = '{}_{}.data'.format(timestamp(), secure_filename(file.filename))
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
return redirect(url_for('upload_file'))
return '''
<!doctype html>
<title>Upload</title>
<form enctype="multipart/form-data" action="/ul" method="POST">
<input type="file" id="file" name="file">
<input type="submit">
</form>
'''
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000)
|
9f1134174c594564519a88cbfafe443b2be782e2
|
python/render/render_tracks.py
|
python/render/render_tracks.py
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}'.format(metadata['protein'], metadata['serial_number'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = metadata['protein']
d['long_label'] = 'Predicted {} binding sites (site width = {}); iMADS model {}'.format(metadata['protein'], metadata['width'], metadata['serial_number'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
Update track_name, short_label, and long_label per discussions on 2016-09-09
|
Update track_name, short_label, and long_label per discussions on 2016-09-09
|
Python
|
mit
|
Duke-GCB/TrackHubGenerator,Duke-GCB/TrackHubGenerator
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
Update track_name, short_label, and long_label per discussions on 2016-09-09
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}'.format(metadata['protein'], metadata['serial_number'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = metadata['protein']
d['long_label'] = 'Predicted {} binding sites (site width = {}); iMADS model {}'.format(metadata['protein'], metadata['width'], metadata['serial_number'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
<commit_before>__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
<commit_msg>Update track_name, short_label, and long_label per discussions on 2016-09-09<commit_after>
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}'.format(metadata['protein'], metadata['serial_number'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = metadata['protein']
d['long_label'] = 'Predicted {} binding sites (site width = {}); iMADS model {}'.format(metadata['protein'], metadata['width'], metadata['serial_number'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
Update track_name, short_label, and long_label per discussions on 2016-09-09__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}'.format(metadata['protein'], metadata['serial_number'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = metadata['protein']
d['long_label'] = 'Predicted {} binding sites (site width = {}); iMADS model {}'.format(metadata['protein'], metadata['width'], metadata['serial_number'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
<commit_before>__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}({})'.format(metadata['protein'], metadata['serial_number'], metadata['author_identifier'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = '{}_{} binding'.format(metadata['protein'], metadata['serial_number'])
d['long_label'] = 'Predicted {} binding sites (site width = {})'.format(metadata['protein'], metadata['width'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
<commit_msg>Update track_name, short_label, and long_label per discussions on 2016-09-09<commit_after>__author__ = 'dcl9'
from render import render_template
import argparse
import yaml
def generate_track_dict(metadata):
d = dict()
d['track_name'] = '{}_{}'.format(metadata['protein'], metadata['serial_number'])
d['bigbed_url'] = metadata['track_filename']
d['short_label'] = metadata['protein']
d['long_label'] = 'Predicted {} binding sites (site width = {}); iMADS model {}'.format(metadata['protein'], metadata['width'], metadata['serial_number'])
return d
def render_tracks(assembly, metadata_file):
obj = yaml.load(metadata_file)
# Just pull out the assembly ones
tracks = [generate_track_dict(x) for x in obj if x['assembly'] == assembly]
trackdb = {'tracks': tracks}
render_template(trackdb, 'trackDb')
def main():
parser = argparse.ArgumentParser(description='Render trackDb.txt')
parser.add_argument('--assembly')
parser.add_argument('metadata_file', type=argparse.FileType('r'))
args = parser.parse_args()
render_tracks(args.assembly, args.metadata_file)
if __name__ == '__main__':
main()
|
fa5bb37159d09c5bff53b83a4821e3f154892d1d
|
numba/cuda/device_init.py
|
numba/cuda/device_init.py
|
from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
return driver.driver.is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
|
from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
# whilst `driver.is_available` will init the driver itself,
# the driver initialization may raise and as a result break
# test discovery/orchestration as `cuda.is_available` is often
# used as a guard for whether to run a CUDA test, the try/except
# below is to handle this case.
driver_is_available = False
try:
driver_is_available = driver.driver.is_available
except CudaSupportError:
pass
return driver_is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
|
Fix issue with test discovery and broken CUDA drivers.
|
Fix issue with test discovery and broken CUDA drivers.
This patch allows the test discovery mechanism to work even in the
case of a broken/misconfigured CUDA driver.
Fixes #2841
|
Python
|
bsd-2-clause
|
sklam/numba,cpcloud/numba,sklam/numba,numba/numba,seibert/numba,jriehl/numba,numba/numba,stuartarchibald/numba,jriehl/numba,IntelLabs/numba,cpcloud/numba,stuartarchibald/numba,gmarkall/numba,IntelLabs/numba,jriehl/numba,seibert/numba,numba/numba,jriehl/numba,gmarkall/numba,numba/numba,sklam/numba,seibert/numba,stuartarchibald/numba,IntelLabs/numba,sklam/numba,IntelLabs/numba,jriehl/numba,stonebig/numba,cpcloud/numba,IntelLabs/numba,stonebig/numba,stonebig/numba,gmarkall/numba,stonebig/numba,seibert/numba,cpcloud/numba,stuartarchibald/numba,gmarkall/numba,sklam/numba,seibert/numba,stonebig/numba,gmarkall/numba,numba/numba,stuartarchibald/numba,cpcloud/numba
|
from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
return driver.driver.is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
Fix issue with test discovery and broken CUDA drivers.
This patch allows the test discovery mechanism to work even in the
case of a broken/misconfigured CUDA driver.
Fixes #2841
|
from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
# whilst `driver.is_available` will init the driver itself,
# the driver initialization may raise and as a result break
# test discovery/orchestration as `cuda.is_available` is often
# used as a guard for whether to run a CUDA test, the try/except
# below is to handle this case.
driver_is_available = False
try:
driver_is_available = driver.driver.is_available
except CudaSupportError:
pass
return driver_is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
|
<commit_before>from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
return driver.driver.is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
<commit_msg>Fix issue with test discovery and broken CUDA drivers.
This patch allows the test discovery mechanism to work even in the
case of a broken/misconfigured CUDA driver.
Fixes #2841<commit_after>
|
from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
# whilst `driver.is_available` will init the driver itself,
# the driver initialization may raise and as a result break
# test discovery/orchestration as `cuda.is_available` is often
# used as a guard for whether to run a CUDA test, the try/except
# below is to handle this case.
driver_is_available = False
try:
driver_is_available = driver.driver.is_available
except CudaSupportError:
pass
return driver_is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
|
from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
return driver.driver.is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
Fix issue with test discovery and broken CUDA drivers.
This patch allows the test discovery mechanism to work even in the
case of a broken/misconfigured CUDA driver.
Fixes #2841from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
# whilst `driver.is_available` will init the driver itself,
# the driver initialization may raise and as a result break
# test discovery/orchestration as `cuda.is_available` is often
# used as a guard for whether to run a CUDA test, the try/except
# below is to handle this case.
driver_is_available = False
try:
driver_is_available = driver.driver.is_available
except CudaSupportError:
pass
return driver_is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
|
<commit_before>from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
return driver.driver.is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
<commit_msg>Fix issue with test discovery and broken CUDA drivers.
This patch allows the test discovery mechanism to work even in the
case of a broken/misconfigured CUDA driver.
Fixes #2841<commit_after>from __future__ import print_function, absolute_import, division
# Re export
from .stubs import (threadIdx, blockIdx, blockDim, gridDim, syncthreads,
shared, local, const, grid, gridsize, atomic,
threadfence_block, threadfence_system,
threadfence)
from .cudadrv.error import CudaSupportError
from .cudadrv import nvvm
from . import initialize
from .errors import KernelRuntimeError
from .decorators import jit, autojit, declare_device
from .api import *
from .api import _auto_device
from .kernels import reduction
reduce = Reduce = reduction.Reduce
def is_available():
"""Returns a boolean to indicate the availability of a CUDA GPU.
This will initialize the driver if it hasn't been initialized.
"""
# whilst `driver.is_available` will init the driver itself,
# the driver initialization may raise and as a result break
# test discovery/orchestration as `cuda.is_available` is often
# used as a guard for whether to run a CUDA test, the try/except
# below is to handle this case.
driver_is_available = False
try:
driver_is_available = driver.driver.is_available
except CudaSupportError:
pass
return driver_is_available and nvvm.is_available()
def cuda_error():
"""Returns None or an exception if the CUDA driver fails to initialize.
"""
return driver.driver.initialization_error
initialize.initialize_all()
|
bf73e73c93c323a6b7395b3a1d40dd55dea4b65a
|
indra/sources/dgi/__init__.py
|
indra/sources/dgi/__init__.py
|
# -*- coding: utf-8 -*-
"""A processor for the `Drug Gene Interaction Database (DGI-DB) <http://www.dgidb.org>`_.
* `Integration of the Drug–Gene Interaction Database (DGIdb 4.0) with open crowdsource efforts
<https://doi.org/10.1093/nar/gkaa1084>`_. Freshour, *et al*. Nucleic Acids Research. 2020 Nov 25.
Interactions data from the January 2021 release can be obtained at the
following URLs:
* https://www.dgidb.org/data/monthly_tsvs/2021-Jan/interactions.tsv
"""
from .api import get_version_df
from .processor import DGIProcessor
|
Update init file with descriptions
|
Update init file with descriptions
|
Python
|
bsd-2-clause
|
bgyori/indra,johnbachman/indra,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/belpy,bgyori/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,sorgerlab/indra
|
Update init file with descriptions
|
# -*- coding: utf-8 -*-
"""A processor for the `Drug Gene Interaction Database (DGI-DB) <http://www.dgidb.org>`_.
* `Integration of the Drug–Gene Interaction Database (DGIdb 4.0) with open crowdsource efforts
<https://doi.org/10.1093/nar/gkaa1084>`_. Freshour, *et al*. Nucleic Acids Research. 2020 Nov 25.
Interactions data from the January 2021 release can be obtained at the
following URLs:
* https://www.dgidb.org/data/monthly_tsvs/2021-Jan/interactions.tsv
"""
from .api import get_version_df
from .processor import DGIProcessor
|
<commit_before><commit_msg>Update init file with descriptions<commit_after>
|
# -*- coding: utf-8 -*-
"""A processor for the `Drug Gene Interaction Database (DGI-DB) <http://www.dgidb.org>`_.
* `Integration of the Drug–Gene Interaction Database (DGIdb 4.0) with open crowdsource efforts
<https://doi.org/10.1093/nar/gkaa1084>`_. Freshour, *et al*. Nucleic Acids Research. 2020 Nov 25.
Interactions data from the January 2021 release can be obtained at the
following URLs:
* https://www.dgidb.org/data/monthly_tsvs/2021-Jan/interactions.tsv
"""
from .api import get_version_df
from .processor import DGIProcessor
|
Update init file with descriptions# -*- coding: utf-8 -*-
"""A processor for the `Drug Gene Interaction Database (DGI-DB) <http://www.dgidb.org>`_.
* `Integration of the Drug–Gene Interaction Database (DGIdb 4.0) with open crowdsource efforts
<https://doi.org/10.1093/nar/gkaa1084>`_. Freshour, *et al*. Nucleic Acids Research. 2020 Nov 25.
Interactions data from the January 2021 release can be obtained at the
following URLs:
* https://www.dgidb.org/data/monthly_tsvs/2021-Jan/interactions.tsv
"""
from .api import get_version_df
from .processor import DGIProcessor
|
<commit_before><commit_msg>Update init file with descriptions<commit_after># -*- coding: utf-8 -*-
"""A processor for the `Drug Gene Interaction Database (DGI-DB) <http://www.dgidb.org>`_.
* `Integration of the Drug–Gene Interaction Database (DGIdb 4.0) with open crowdsource efforts
<https://doi.org/10.1093/nar/gkaa1084>`_. Freshour, *et al*. Nucleic Acids Research. 2020 Nov 25.
Interactions data from the January 2021 release can be obtained at the
following URLs:
* https://www.dgidb.org/data/monthly_tsvs/2021-Jan/interactions.tsv
"""
from .api import get_version_df
from .processor import DGIProcessor
|
|
64a78085fffe8dc525596b870c8e150d9171f271
|
resources/site-packages/pulsar/monitor.py
|
resources/site-packages/pulsar/monitor.py
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
self._closing.set()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
# Only when closing Kodi
if xbmc.abortRequested:
self._closing.set()
self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
Fix issue where Pulsar would enter a restart loop when cancelling a buffering
|
Fix issue where Pulsar would enter a restart loop when cancelling a buffering
Signed-off-by: Steeve Morin <7f3ba7bbc079b62d3fe54555c2b0ce0ddda2bb7c@gmail.com>
|
Python
|
bsd-3-clause
|
likeitneverwentaway/plugin.video.quasar,komakino/plugin.video.pulsar,johnnyslt/plugin.video.quasar,elrosti/plugin.video.pulsar,Zopieux/plugin.video.pulsar,pmphxs/plugin.video.pulsar,johnnyslt/plugin.video.quasar,steeve/plugin.video.pulsar,peer23peer/plugin.video.quasar,peer23peer/plugin.video.quasar,likeitneverwentaway/plugin.video.quasar
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
self._closing.set()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
Fix issue where Pulsar would enter a restart loop when cancelling a buffering
Signed-off-by: Steeve Morin <7f3ba7bbc079b62d3fe54555c2b0ce0ddda2bb7c@gmail.com>
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
# Only when closing Kodi
if xbmc.abortRequested:
self._closing.set()
self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
<commit_before>import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
self._closing.set()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
<commit_msg>Fix issue where Pulsar would enter a restart loop when cancelling a buffering
Signed-off-by: Steeve Morin <7f3ba7bbc079b62d3fe54555c2b0ce0ddda2bb7c@gmail.com><commit_after>
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
# Only when closing Kodi
if xbmc.abortRequested:
self._closing.set()
self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
self._closing.set()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
Fix issue where Pulsar would enter a restart loop when cancelling a buffering
Signed-off-by: Steeve Morin <7f3ba7bbc079b62d3fe54555c2b0ce0ddda2bb7c@gmail.com>import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
# Only when closing Kodi
if xbmc.abortRequested:
self._closing.set()
self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
<commit_before>import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
self._closing.set()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
<commit_msg>Fix issue where Pulsar would enter a restart loop when cancelling a buffering
Signed-off-by: Steeve Morin <7f3ba7bbc079b62d3fe54555c2b0ce0ddda2bb7c@gmail.com><commit_after>import xbmc
import urllib2
import threading
from pulsar.config import PULSARD_HOST
class PulsarMonitor(xbmc.Monitor):
def __init__(self):
self._closing = threading.Event()
@property
def closing(self):
return self._closing
def onAbortRequested(self):
# Only when closing Kodi
if xbmc.abortRequested:
self._closing.set()
self._closing.clear()
def onSettingsChanged(self):
try:
urllib2.urlopen("%s/reload" % PULSARD_HOST)
except:
pass
|
67c98ba67f99d5de5022b32fdb3eb9cd0d96908f
|
scripts/tappedout.py
|
scripts/tappedout.py
|
from binascii import unhexlify
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')
|
from binascii import unhexlify
#This can be replace by using the "decode" function on the reponse
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.request.headers['Accept-Encoding'] = ['']
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')
#this ca be enhanced by using the protobuf to deserialize the message
|
Add missing line on the script and some comment
|
Add missing line on the script and some comment
|
Python
|
apache-2.0
|
jstuyck/MitmProxyScripts
|
from binascii import unhexlify
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')Add missing line on the script and some comment
|
from binascii import unhexlify
#This can be replace by using the "decode" function on the reponse
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.request.headers['Accept-Encoding'] = ['']
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')
#this ca be enhanced by using the protobuf to deserialize the message
|
<commit_before>from binascii import unhexlify
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')<commit_msg>Add missing line on the script and some comment<commit_after>
|
from binascii import unhexlify
#This can be replace by using the "decode" function on the reponse
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.request.headers['Accept-Encoding'] = ['']
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')
#this ca be enhanced by using the protobuf to deserialize the message
|
from binascii import unhexlify
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')Add missing line on the script and some commentfrom binascii import unhexlify
#This can be replace by using the "decode" function on the reponse
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.request.headers['Accept-Encoding'] = ['']
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')
#this ca be enhanced by using the protobuf to deserialize the message
|
<commit_before>from binascii import unhexlify
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')<commit_msg>Add missing line on the script and some comment<commit_after>from binascii import unhexlify
#This can be replace by using the "decode" function on the reponse
def request(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.request.headers['Accept-Encoding'] = ['']
def response(context, flow):
if (flow.request.host.find('change.me') > -1 and flow.request.path.find('somethingaboutcurrency') > -1):
flow.response.content = unhexlify('[..]ffffff[..]')
#this ca be enhanced by using the protobuf to deserialize the message
|
151a5f75a240c875fc591390c208c933e8d0e782
|
indra/sources/eidos/eidos_reader.py
|
indra/sources/eidos/eidos_reader.py
|
import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
|
import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.EidosSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
|
Update name of Eidos reading class
|
Update name of Eidos reading class
|
Python
|
bsd-2-clause
|
johnbachman/indra,pvtodorov/indra,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,bgyori/indra,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,sorgerlab/belpy,bgyori/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,johnbachman/belpy,pvtodorov/indra
|
import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
Update name of Eidos reading class
|
import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.EidosSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
|
<commit_before>import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
<commit_msg>Update name of Eidos reading class<commit_after>
|
import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.EidosSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
|
import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
Update name of Eidos reading classimport json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.EidosSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
|
<commit_before>import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.AgroSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
<commit_msg>Update name of Eidos reading class<commit_after>import json
from indra.java_vm import autoclass, JavaException
class EidosReader(object):
"""Reader object keeping an instance of the Eidos reader as a singleton.
This allows the Eidos reader to need initialization when the first piece of
text is read, the subsequent readings are done with the same
instance of the reader and are therefore faster.
Attributes
----------
eidos_reader : org.clulab.wm.AgroSystem
A Scala object, an instance of the Eidos reading system. It is
instantiated only when first processing text.
"""
def __init__(self):
self.eidos_reader = None
def process_text(self, text):
"""Return a mentions JSON object given text.
Parameters
----------
text : str
Text to be processed.
Returns
-------
json_dict : dict
A JSON object of mentions extracted from text.
"""
if self.eidos_reader is None:
eidos = autoclass('org.clulab.wm.EidosSystem')
self.eidos_reader = eidos(autoclass('java.lang.Object')())
mentions = self.eidos_reader.extractFrom(text)
ser = autoclass('org.clulab.wm.serialization.json.WMJSONSerializer')
mentions_json = ser.toJsonStr(mentions)
json_dict = json.loads(mentions_json)
return json_dict
|
09ed0e911e530e9b907ac92f2892248b6af245fa
|
vcr/errors.py
|
vcr/errors.py
|
class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
|
class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
self.cassette = kwargs["cassette"]
self.failed_request = kwargs["failed_request"]
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
|
Add cassette and failed request as properties of thrown CannotOverwriteCassetteException
|
Add cassette and failed request as properties of thrown CannotOverwriteCassetteException
|
Python
|
mit
|
kevin1024/vcrpy,graingert/vcrpy,graingert/vcrpy,kevin1024/vcrpy
|
class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
Add cassette and failed request as properties of thrown CannotOverwriteCassetteException
|
class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
self.cassette = kwargs["cassette"]
self.failed_request = kwargs["failed_request"]
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
|
<commit_before>class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
<commit_msg>Add cassette and failed request as properties of thrown CannotOverwriteCassetteException<commit_after>
|
class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
self.cassette = kwargs["cassette"]
self.failed_request = kwargs["failed_request"]
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
|
class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
Add cassette and failed request as properties of thrown CannotOverwriteCassetteExceptionclass CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
self.cassette = kwargs["cassette"]
self.failed_request = kwargs["failed_request"]
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
|
<commit_before>class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
<commit_msg>Add cassette and failed request as properties of thrown CannotOverwriteCassetteException<commit_after>class CannotOverwriteExistingCassetteException(Exception):
def __init__(self, *args, **kwargs):
self.cassette = kwargs["cassette"]
self.failed_request = kwargs["failed_request"]
message = self._get_message(kwargs["cassette"], kwargs["failed_request"])
super(CannotOverwriteExistingCassetteException, self).__init__(message)
def _get_message(self, cassette, failed_request):
"""Get the final message related to the exception"""
# Get the similar requests in the cassette that
# have match the most with the request.
best_matches = cassette.find_requests_with_most_matches(failed_request)
# Build a comprehensible message to put in the exception.
best_matches_msg = ""
for best_match in best_matches:
request, _, failed_matchers_assertion_msgs = best_match
best_matches_msg += "Similar request found : (%r).\n" % request
for failed_matcher, assertion_msg in failed_matchers_assertion_msgs:
best_matches_msg += "Matcher failed : %s\n" "%s\n" % (
failed_matcher,
assertion_msg,
)
return (
"Can't overwrite existing cassette (%r) in "
"your current record mode (%r).\n"
"No match for the request (%r) was found.\n"
"%s"
% (cassette._path, cassette.record_mode, failed_request, best_matches_msg)
)
class UnhandledHTTPRequestError(KeyError):
"""Raised when a cassette does not contain the request we want."""
pass
|
392f209791eede86d65f018a9b873b33cb7ccb02
|
test/test_uniprot_retrieval_data.py
|
test/test_uniprot_retrieval_data.py
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
Fix issue with GO term (unsorted).
|
Fix issue with GO term (unsorted).
|
Python
|
agpl-3.0
|
ArnaudBelcour/Workflow_GeneList_Analysis,ArnaudBelcour/Workflow_GeneList_Analysis
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
Fix issue with GO term (unsorted).
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
<commit_before>import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
<commit_msg>Fix issue with GO term (unsorted).<commit_after>
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
Fix issue with GO term (unsorted).import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
<commit_before>import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist(), df_result_truth['GOs'].tolist())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
<commit_msg>Fix issue with GO term (unsorted).<commit_after>import numpy as np
import pandas as pa
import unittest
import pathway_extraction.uniprot_retrieval_data as uniprot_retrieval_data
test_data_directory_uniprot = 'test_data/' + 'test_uniprot_retrieval/'
class uniprot_retrieval_data_test(unittest.TestCase):
def test_extract_information_from_uniprot(self):
print("\nTesting uniprot retrieval data using blast result ")
df_data = pa.read_csv(test_data_directory_uniprot + 'data.tsv', sep='\t')
df_data.replace(np.nan, '', regex=True, inplace=True)
df_result = uniprot_retrieval_data.extract_information_from_uniprot(df_data)
df_result_truth = pa.read_csv(test_data_directory_uniprot + 'result.tsv', sep='\t')
np.testing.assert_array_equal(df_result['GOs'].tolist().sort(), df_result_truth['GOs'].tolist().sort())
np.testing.assert_array_equal(df_result['InterProScan'].tolist(), df_result_truth['InterProScan'].tolist())
|
e17fe26503e9a72b43c1b9b662dd4319ccff1fd7
|
server/__init__.py
|
server/__init__.py
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk:v0.1.0'
)
|
Use an immutable tagged version of the Docker CLI container
|
Use an immutable tagged version of the Docker CLI container
|
Python
|
apache-2.0
|
DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
Use an immutable tagged version of the Docker CLI container
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk:v0.1.0'
)
|
<commit_before>import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
<commit_msg>Use an immutable tagged version of the Docker CLI container<commit_after>
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk:v0.1.0'
)
|
import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
Use an immutable tagged version of the Docker CLI containerimport os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk:v0.1.0'
)
|
<commit_before>import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk'
)
<commit_msg>Use an immutable tagged version of the Docker CLI container<commit_after>import os
from girder.utility.webroot import Webroot
from .rest_slicer_cli import genRESTEndPointsForSlicerCLIsInDocker
_template = os.path.join(
os.path.dirname(__file__),
'webroot.mako'
)
def load(info):
girderRoot = info['serverRoot']
histomicsRoot = Webroot(_template)
histomicsRoot.updateHtmlVars(girderRoot.vars)
histomicsRoot.updateHtmlVars({'title': 'HistomicsTK'})
info['serverRoot'].histomicstk = histomicsRoot
info['serverRoot'].girder = girderRoot
genRESTEndPointsForSlicerCLIsInDocker(
info, 'HistomicsTK', 'dsarchive/histomicstk:v0.1.0'
)
|
b60fb0db2cc1ab3605f34e9b604e920279434c36
|
vterm_test.py
|
vterm_test.py
|
#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
|
#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
handle_mouse=False,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
|
Disable mouse handling in vterm example.
|
Disable mouse handling in vterm example.
|
Python
|
lgpl-2.1
|
westurner/urwid,wardi/urwid,zyga/urwid,douglas-larocca/urwid,harlowja/urwid,drestebon/urwid,hkoof/urwid,bk2204/urwid,urwid/urwid,drestebon/urwid,inducer/urwid,hkoof/urwid,hkoof/urwid,bk2204/urwid,rndusr/urwid,rndusr/urwid,zyga/urwid,westurner/urwid,inducer/urwid,rndusr/urwid,mountainstorm/urwid,foreni-packages/urwid,foreni-packages/urwid,ivanov/urwid,tonycpsu/urwid,harlowja/urwid,zyga/urwid,drestebon/urwid,tonycpsu/urwid,mountainstorm/urwid,urwid/urwid,foreni-packages/urwid,douglas-larocca/urwid,ivanov/urwid,douglas-larocca/urwid,Julian/urwid,westurner/urwid,harlowja/urwid,wardi/urwid,Julian/urwid,Julian/urwid,ivanov/urwid,mountainstorm/urwid,tonycpsu/urwid,urwid/urwid,inducer/urwid,wardi/urwid,bk2204/urwid
|
#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
Disable mouse handling in vterm example.
|
#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
handle_mouse=False,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
<commit_msg>Disable mouse handling in vterm example.<commit_after>
|
#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
handle_mouse=False,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
|
#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
Disable mouse handling in vterm example.#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
handle_mouse=False,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
<commit_msg>Disable mouse handling in vterm example.<commit_after>#!/usr/bin/python
import urwid
def main():
event_loop = urwid.SelectEventLoop()
mainframe = urwid.Frame(
urwid.Columns([
('fixed', 3, urwid.SolidFill('|')),
urwid.Pile([
('weight', 70, urwid.TerminalWidget(None, event_loop)),
('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))),
]),
('fixed', 3, urwid.SolidFill('|')),
], box_columns=[1]),
header=urwid.Columns([
('fixed', 3, urwid.Text('.,:')),
urwid.Divider('-'),
('fixed', 3, urwid.Text(':,.')),
]),
footer=urwid.Columns([
('fixed', 3, urwid.Text('`"*')),
urwid.Divider('-'),
('fixed', 3, urwid.Text('*"\'')),
]),
)
def quit(key):
if key in ('q', 'Q'):
raise urwid.ExitMainLoop()
loop = urwid.MainLoop(
mainframe,
handle_mouse=False,
unhandled_input=quit,
event_loop=event_loop
).run()
if __name__ == '__main__':
main()
|
e1fc818b8d563c00c77060cd74d2781b287c0b5d
|
xnuplot/__init__.py
|
xnuplot/__init__.py
|
from .plot import Plot, SPlot
__all__ = ["gnuplot", "numplot"]
|
from .plot import Plot, SPlot
__all__ = ["Plot", "SPlot", "gnuplot", "numplot"]
|
Include Plot, SPlot in xnuplot.__all__.
|
Include Plot, SPlot in xnuplot.__all__.
|
Python
|
mit
|
marktsuchida/Xnuplot
|
from .plot import Plot, SPlot
__all__ = ["gnuplot", "numplot"]
Include Plot, SPlot in xnuplot.__all__.
|
from .plot import Plot, SPlot
__all__ = ["Plot", "SPlot", "gnuplot", "numplot"]
|
<commit_before>from .plot import Plot, SPlot
__all__ = ["gnuplot", "numplot"]
<commit_msg>Include Plot, SPlot in xnuplot.__all__.<commit_after>
|
from .plot import Plot, SPlot
__all__ = ["Plot", "SPlot", "gnuplot", "numplot"]
|
from .plot import Plot, SPlot
__all__ = ["gnuplot", "numplot"]
Include Plot, SPlot in xnuplot.__all__.from .plot import Plot, SPlot
__all__ = ["Plot", "SPlot", "gnuplot", "numplot"]
|
<commit_before>from .plot import Plot, SPlot
__all__ = ["gnuplot", "numplot"]
<commit_msg>Include Plot, SPlot in xnuplot.__all__.<commit_after>from .plot import Plot, SPlot
__all__ = ["Plot", "SPlot", "gnuplot", "numplot"]
|
7c37d4f95897ddbc061ec0a84185a19899b85b89
|
compile_for_dist.py
|
compile_for_dist.py
|
#!/ms/dist/python/PROJ/core/2.5.2-1/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python2.6
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
|
Update shebang to use /usr/bin/env.
|
Update shebang to use /usr/bin/env.
Remove the /ms/dist reference.
|
Python
|
apache-2.0
|
quattor/aquilon-protocols,quattor/aquilon-protocols
|
#!/ms/dist/python/PROJ/core/2.5.2-1/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
Update shebang to use /usr/bin/env.
Remove the /ms/dist reference.
|
#!/usr/bin/env python2.6
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
|
<commit_before>#!/ms/dist/python/PROJ/core/2.5.2-1/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
<commit_msg>Update shebang to use /usr/bin/env.
Remove the /ms/dist reference.<commit_after>
|
#!/usr/bin/env python2.6
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
|
#!/ms/dist/python/PROJ/core/2.5.2-1/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
Update shebang to use /usr/bin/env.
Remove the /ms/dist reference.#!/usr/bin/env python2.6
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
|
<commit_before>#!/ms/dist/python/PROJ/core/2.5.2-1/bin/python
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
<commit_msg>Update shebang to use /usr/bin/env.
Remove the /ms/dist reference.<commit_after>#!/usr/bin/env python2.6
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# Copyright (C) 2008 Morgan Stanley
#
# This module is part of Aquilon
"""Add /ms/dist to traceback of files compiled in /ms/dev."""
import sys
import py_compile
import re
def main(args=None):
"""Except for the custom dfile, this is stolen directly from py_compile.
Compile all of the given filename arguments. This custom version
replaces /ms/dev in the path with /ms/dist to match our environment usage.
"""
if args is None:
args = sys.argv[1:]
dev_re = re.compile(r'/ms/dev/(?P<meta>[^/]+)/(?P<proj>[^/]+)'
r'/(?P<release>[^/]+)/install/(?P<path>.*)')
for filename in args:
try:
m = dev_re.match(filename)
if m:
dfile = "/ms/dist/%(meta)s/PROJ/%(proj)s" \
"/%(release)s/%(path)s" % m.groupdict()
else:
dfile = filename
py_compile.compile(filename, dfile=dfile, doraise=True)
except py_compile.PyCompileError, e:
sys.stderr.write(e.msg)
if __name__ == "__main__":
main()
|
6aa8db30afba817ff9b5653480d6f735f09d9c3a
|
ladder.py
|
ladder.py
|
#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
|
#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
def players(self):
return set(self.standings)
def match_valid(self, player_one, player_two):
if not {player_one, player_two} < self.players():
return False
return True
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
|
Add players, match_valid to Ladder
|
Add players, match_valid to Ladder
-Add rudimentary players() method that creates a set
-Add match_valid() method that *only* checks that players are in
the ladder standings
|
Python
|
agpl-3.0
|
massgo/mgaladder,hndrewaall/mgaladder,massgo/mgaladder,massgo/mgaladder,hndrewaall/mgaladder,hndrewaall/mgaladder
|
#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
Add players, match_valid to Ladder
-Add rudimentary players() method that creates a set
-Add match_valid() method that *only* checks that players are in
the ladder standings
|
#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
def players(self):
return set(self.standings)
def match_valid(self, player_one, player_two):
if not {player_one, player_two} < self.players():
return False
return True
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
|
<commit_before>#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
<commit_msg>Add players, match_valid to Ladder
-Add rudimentary players() method that creates a set
-Add match_valid() method that *only* checks that players are in
the ladder standings<commit_after>
|
#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
def players(self):
return set(self.standings)
def match_valid(self, player_one, player_two):
if not {player_one, player_two} < self.players():
return False
return True
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
|
#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
Add players, match_valid to Ladder
-Add rudimentary players() method that creates a set
-Add match_valid() method that *only* checks that players are in
the ladder standings#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
def players(self):
return set(self.standings)
def match_valid(self, player_one, player_two):
if not {player_one, player_two} < self.players():
return False
return True
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
|
<commit_before>#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
<commit_msg>Add players, match_valid to Ladder
-Add rudimentary players() method that creates a set
-Add match_valid() method that *only* checks that players are in
the ladder standings<commit_after>#! /usr/bin/env python3
class Player:
def __init__(self, name, rank):
self.name = name
self.rank = rank
def __repr__(self):
return '<{:s}(name={:s}, rank={:d})>'.format(self.__class__.__name__, self.name, self.rank)
def __str__(self):
rank_str = ''
if self.rank < 0:
rank_str = '{:d}K'.format(-self.rank)
else:
rank_str = '{:d}D'.format(self.rank)
return '{:s} {:s}'.format(self.name, rank_str)
class Ladder:
def __init__(self, standings):
self.standings = standings
def __str__(self):
the_string = 'Ladder standings:'
position = 1
for player in self.standings:
the_string += '\n {:d}. {:s}'.format(position, str(player))
position += 1
return the_string
def players(self):
return set(self.standings)
def match_valid(self, player_one, player_two):
if not {player_one, player_two} < self.players():
return False
return True
if __name__ == '__main__':
ladder = Ladder([Player('Andrew', -1), Player('Walther', 5), Player('Milan', -6)])
print(ladder)
|
17ddd05e35f7cff90530cdb2df0c4971b97e7302
|
cmcb/utils.py
|
cmcb/utils.py
|
import sys
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
if log:
print('calling', function.__name__, args, kwargs, file=out)
result = function(*args, **kwargs)
if log:
print('result', function.__name__, result, file=out)
return result
return wrapped_function
return wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
|
import sys
import inspect
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
result = function(*args, **kwargs)
if log:
print(function.__name__, args, kwargs, result, file=out)
return result
return wrapped_function
def async_wrapper(async_function):
@wraps(async_function)
async def wrapped_async_function(*args, **kwargs):
result = await async_function(*args, **kwargs)
if log:
print(async_function.__name__, args, kwargs, result, file=out)
return result
return wrapped_async_function
def cool_wrapper(function):
is_async_function = inspect.iscoroutinefunction(function)
if is_async_function:
return async_wrapper(function)
else:
return wrapper(function)
return cool_wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
|
Update logging to log async functions properly
|
Update logging to log async functions properly
|
Python
|
mit
|
festinuz/cmcb,festinuz/cmcb
|
import sys
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
if log:
print('calling', function.__name__, args, kwargs, file=out)
result = function(*args, **kwargs)
if log:
print('result', function.__name__, result, file=out)
return result
return wrapped_function
return wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
Update logging to log async functions properly
|
import sys
import inspect
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
result = function(*args, **kwargs)
if log:
print(function.__name__, args, kwargs, result, file=out)
return result
return wrapped_function
def async_wrapper(async_function):
@wraps(async_function)
async def wrapped_async_function(*args, **kwargs):
result = await async_function(*args, **kwargs)
if log:
print(async_function.__name__, args, kwargs, result, file=out)
return result
return wrapped_async_function
def cool_wrapper(function):
is_async_function = inspect.iscoroutinefunction(function)
if is_async_function:
return async_wrapper(function)
else:
return wrapper(function)
return cool_wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
|
<commit_before>import sys
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
if log:
print('calling', function.__name__, args, kwargs, file=out)
result = function(*args, **kwargs)
if log:
print('result', function.__name__, result, file=out)
return result
return wrapped_function
return wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
<commit_msg>Update logging to log async functions properly<commit_after>
|
import sys
import inspect
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
result = function(*args, **kwargs)
if log:
print(function.__name__, args, kwargs, result, file=out)
return result
return wrapped_function
def async_wrapper(async_function):
@wraps(async_function)
async def wrapped_async_function(*args, **kwargs):
result = await async_function(*args, **kwargs)
if log:
print(async_function.__name__, args, kwargs, result, file=out)
return result
return wrapped_async_function
def cool_wrapper(function):
is_async_function = inspect.iscoroutinefunction(function)
if is_async_function:
return async_wrapper(function)
else:
return wrapper(function)
return cool_wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
|
import sys
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
if log:
print('calling', function.__name__, args, kwargs, file=out)
result = function(*args, **kwargs)
if log:
print('result', function.__name__, result, file=out)
return result
return wrapped_function
return wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
Update logging to log async functions properlyimport sys
import inspect
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
result = function(*args, **kwargs)
if log:
print(function.__name__, args, kwargs, result, file=out)
return result
return wrapped_function
def async_wrapper(async_function):
@wraps(async_function)
async def wrapped_async_function(*args, **kwargs):
result = await async_function(*args, **kwargs)
if log:
print(async_function.__name__, args, kwargs, result, file=out)
return result
return wrapped_async_function
def cool_wrapper(function):
is_async_function = inspect.iscoroutinefunction(function)
if is_async_function:
return async_wrapper(function)
else:
return wrapper(function)
return cool_wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
|
<commit_before>import sys
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
if log:
print('calling', function.__name__, args, kwargs, file=out)
result = function(*args, **kwargs)
if log:
print('result', function.__name__, result, file=out)
return result
return wrapped_function
return wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
<commit_msg>Update logging to log async functions properly<commit_after>import sys
import inspect
from functools import wraps, _make_key
import redis
def logging(*triggers, out=sys.stdout):
"""Will log function if all triggers are True"""
log = min(triggers) # will be False if any trigger is false
def wrapper(function):
@wraps(function)
def wrapped_function(*args, **kwargs):
result = function(*args, **kwargs)
if log:
print(function.__name__, args, kwargs, result, file=out)
return result
return wrapped_function
def async_wrapper(async_function):
@wraps(async_function)
async def wrapped_async_function(*args, **kwargs):
result = await async_function(*args, **kwargs)
if log:
print(async_function.__name__, args, kwargs, result, file=out)
return result
return wrapped_async_function
def cool_wrapper(function):
is_async_function = inspect.iscoroutinefunction(function)
if is_async_function:
return async_wrapper(function)
else:
return wrapper(function)
return cool_wrapper
def redis_timeout_async_method_cache(timeout, redis_url):
def wrapper(async_method):
cache = redis.from_url(redis_url)
@wraps(async_method)
async def wrapped_method(self, *args, **kwargs):
name_and_args = (async_method.__name__,) + tuple(a for a in args)
key = _make_key(name_and_args, kwargs, False)
cached_result = cache.get(key)
if cached_result is not None:
return cached_result.decode('utf-8')
result = await async_method(self, *args, **kwargs)
cache.setex(key, result, timeout)
return result
return wrapped_method
return wrapper
|
564bca1e051f6b1cc068d1dd53de55fcf4dc7c6f
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = ''
cmd = 'scalac'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r''
multiline = False
line_col_base = (1, 1)
tempfile_suffix = None
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = 'scala'
executable = 'scalac'
version_args = '-version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.11'
regex = (
r'^(?P<file>.+?):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning)): '
r'(?:\[.+?\] )?(?P<message>[^\r\n]+)\r?\n'
r'[^\r\n]+\r?\n'
r'(?P<col>[^\^]*)\^'
)
multiline = True
# line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_STDERR
# selectors = {}
# word_re = None
defaults = {
'lint': ''
}
inline_settings = 'lint'
# inline_overrides = None
comment_re = r'\s*/[/*]'
def cmd(self):
"""
Return the command line to execute.
We override this because we have to munge the -Xlint argument
based on the 'lint' setting.
"""
xlint = '-Xlint'
settings = self.get_view_settings()
options = settings.get('lint')
if options:
xlint += ':' + options
return (self.executable_path, xlint, '-encoding', 'UTF8', '*')
def split_match(self, match):
"""
Return the components of the match.
We override this because scalac lints all referenced files,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
match = None
return super().split_match(match)
|
Use SublimeLinter-javac as a base
|
Use SublimeLinter-javac as a base
|
Python
|
mit
|
jawshooah/SublimeLinter-contrib-scalac
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = ''
cmd = 'scalac'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r''
multiline = False
line_col_base = (1, 1)
tempfile_suffix = None
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None
Use SublimeLinter-javac as a base
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = 'scala'
executable = 'scalac'
version_args = '-version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.11'
regex = (
r'^(?P<file>.+?):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning)): '
r'(?:\[.+?\] )?(?P<message>[^\r\n]+)\r?\n'
r'[^\r\n]+\r?\n'
r'(?P<col>[^\^]*)\^'
)
multiline = True
# line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_STDERR
# selectors = {}
# word_re = None
defaults = {
'lint': ''
}
inline_settings = 'lint'
# inline_overrides = None
comment_re = r'\s*/[/*]'
def cmd(self):
"""
Return the command line to execute.
We override this because we have to munge the -Xlint argument
based on the 'lint' setting.
"""
xlint = '-Xlint'
settings = self.get_view_settings()
options = settings.get('lint')
if options:
xlint += ':' + options
return (self.executable_path, xlint, '-encoding', 'UTF8', '*')
def split_match(self, match):
"""
Return the components of the match.
We override this because scalac lints all referenced files,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
match = None
return super().split_match(match)
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = ''
cmd = 'scalac'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r''
multiline = False
line_col_base = (1, 1)
tempfile_suffix = None
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None
<commit_msg>Use SublimeLinter-javac as a base<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = 'scala'
executable = 'scalac'
version_args = '-version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.11'
regex = (
r'^(?P<file>.+?):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning)): '
r'(?:\[.+?\] )?(?P<message>[^\r\n]+)\r?\n'
r'[^\r\n]+\r?\n'
r'(?P<col>[^\^]*)\^'
)
multiline = True
# line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_STDERR
# selectors = {}
# word_re = None
defaults = {
'lint': ''
}
inline_settings = 'lint'
# inline_overrides = None
comment_re = r'\s*/[/*]'
def cmd(self):
"""
Return the command line to execute.
We override this because we have to munge the -Xlint argument
based on the 'lint' setting.
"""
xlint = '-Xlint'
settings = self.get_view_settings()
options = settings.get('lint')
if options:
xlint += ':' + options
return (self.executable_path, xlint, '-encoding', 'UTF8', '*')
def split_match(self, match):
"""
Return the components of the match.
We override this because scalac lints all referenced files,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
match = None
return super().split_match(match)
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = ''
cmd = 'scalac'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r''
multiline = False
line_col_base = (1, 1)
tempfile_suffix = None
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None
Use SublimeLinter-javac as a base#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = 'scala'
executable = 'scalac'
version_args = '-version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.11'
regex = (
r'^(?P<file>.+?):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning)): '
r'(?:\[.+?\] )?(?P<message>[^\r\n]+)\r?\n'
r'[^\r\n]+\r?\n'
r'(?P<col>[^\^]*)\^'
)
multiline = True
# line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_STDERR
# selectors = {}
# word_re = None
defaults = {
'lint': ''
}
inline_settings = 'lint'
# inline_overrides = None
comment_re = r'\s*/[/*]'
def cmd(self):
"""
Return the command line to execute.
We override this because we have to munge the -Xlint argument
based on the 'lint' setting.
"""
xlint = '-Xlint'
settings = self.get_view_settings()
options = settings.get('lint')
if options:
xlint += ':' + options
return (self.executable_path, xlint, '-encoding', 'UTF8', '*')
def split_match(self, match):
"""
Return the components of the match.
We override this because scalac lints all referenced files,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
match = None
return super().split_match(match)
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = ''
cmd = 'scalac'
executable = None
version_args = '--version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 1.0'
regex = r''
multiline = False
line_col_base = (1, 1)
tempfile_suffix = None
error_stream = util.STREAM_BOTH
selectors = {}
word_re = None
defaults = {}
inline_settings = None
inline_overrides = None
comment_re = None
<commit_msg>Use SublimeLinter-javac as a base<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Josh Hagins
# Copyright (c) 2014 Josh Hagins
#
# License: MIT
#
"""This module exports the Scalac plugin class."""
from SublimeLinter.lint import Linter, util
class Scalac(Linter):
"""Provides an interface to scalac."""
syntax = 'scala'
executable = 'scalac'
version_args = '-version'
version_re = r'(?P<version>\d+\.\d+\.\d+)'
version_requirement = '>= 2.11'
regex = (
r'^(?P<file>.+?):(?P<line>\d+): '
r'(?:(?P<error>error)|(?P<warning>warning)): '
r'(?:\[.+?\] )?(?P<message>[^\r\n]+)\r?\n'
r'[^\r\n]+\r?\n'
r'(?P<col>[^\^]*)\^'
)
multiline = True
# line_col_base = (1, 1)
tempfile_suffix = '-'
error_stream = util.STREAM_STDERR
# selectors = {}
# word_re = None
defaults = {
'lint': ''
}
inline_settings = 'lint'
# inline_overrides = None
comment_re = r'\s*/[/*]'
def cmd(self):
"""
Return the command line to execute.
We override this because we have to munge the -Xlint argument
based on the 'lint' setting.
"""
xlint = '-Xlint'
settings = self.get_view_settings()
options = settings.get('lint')
if options:
xlint += ':' + options
return (self.executable_path, xlint, '-encoding', 'UTF8', '*')
def split_match(self, match):
"""
Return the components of the match.
We override this because scalac lints all referenced files,
and we only want errors from the linted file.
"""
if match:
if match.group('file') != self.filename:
match = None
return super().split_match(match)
|
628d65a15b5c51cb7d4a68e1e6babc01a712a538
|
src/redevbazaar.py
|
src/redevbazaar.py
|
import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)
|
import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
self.mainmenu = [
{ 'Home': [
{'My Listings': self.__getlistingsEvent},
'-',
{'My Orders': self.__getordersEvent},
{'Settings': self.__getsettingsEvent}
]
},
{ 'Messages': [
{'Inbox': self.__getinboxEvent},
{'Send a message': self.__sendmessageEvent},
{'Sent Messages': self.__getoutboxEvent}
]
}
]
def __getlistingsEvent(self):
print "hey"
def __getordersEvent(self):
print "hey"
def __getsettingsEvent(self):
print "hey"
def __getinboxEvent(self):
print "hey"
def __sendmessageEvent(self):
print "hey"
def __getoutboxEvent(self):
print "hey"
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)
|
Add initial file for GUI
|
Add initial file for GUI
|
Python
|
mit
|
cgsheeh/SFWR3XA3_Redevelopment,cgsheeh/SFWR3XA3_Redevelopment
|
import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)Add initial file for GUI
|
import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
self.mainmenu = [
{ 'Home': [
{'My Listings': self.__getlistingsEvent},
'-',
{'My Orders': self.__getordersEvent},
{'Settings': self.__getsettingsEvent}
]
},
{ 'Messages': [
{'Inbox': self.__getinboxEvent},
{'Send a message': self.__sendmessageEvent},
{'Sent Messages': self.__getoutboxEvent}
]
}
]
def __getlistingsEvent(self):
print "hey"
def __getordersEvent(self):
print "hey"
def __getsettingsEvent(self):
print "hey"
def __getinboxEvent(self):
print "hey"
def __sendmessageEvent(self):
print "hey"
def __getoutboxEvent(self):
print "hey"
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)
|
<commit_before>import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)<commit_msg>Add initial file for GUI<commit_after>
|
import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
self.mainmenu = [
{ 'Home': [
{'My Listings': self.__getlistingsEvent},
'-',
{'My Orders': self.__getordersEvent},
{'Settings': self.__getsettingsEvent}
]
},
{ 'Messages': [
{'Inbox': self.__getinboxEvent},
{'Send a message': self.__sendmessageEvent},
{'Sent Messages': self.__getoutboxEvent}
]
}
]
def __getlistingsEvent(self):
print "hey"
def __getordersEvent(self):
print "hey"
def __getsettingsEvent(self):
print "hey"
def __getinboxEvent(self):
print "hey"
def __sendmessageEvent(self):
print "hey"
def __getoutboxEvent(self):
print "hey"
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)
|
import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)Add initial file for GUIimport pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
self.mainmenu = [
{ 'Home': [
{'My Listings': self.__getlistingsEvent},
'-',
{'My Orders': self.__getordersEvent},
{'Settings': self.__getsettingsEvent}
]
},
{ 'Messages': [
{'Inbox': self.__getinboxEvent},
{'Send a message': self.__sendmessageEvent},
{'Sent Messages': self.__getoutboxEvent}
]
}
]
def __getlistingsEvent(self):
print "hey"
def __getordersEvent(self):
print "hey"
def __getsettingsEvent(self):
print "hey"
def __getinboxEvent(self):
print "hey"
def __sendmessageEvent(self):
print "hey"
def __getoutboxEvent(self):
print "hey"
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)
|
<commit_before>import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)<commit_msg>Add initial file for GUI<commit_after>import pyforms
from pyforms import BaseWidget
from pyforms.Controls import ControlText
class WelcomeScreen(BaseWidget):
def __init__(self):
super(WelcomeScreen, self).__init__("TEST1")
self.testText = ControlText("WHERE IS THIS")
self.mainmenu = [
{ 'Home': [
{'My Listings': self.__getlistingsEvent},
'-',
{'My Orders': self.__getordersEvent},
{'Settings': self.__getsettingsEvent}
]
},
{ 'Messages': [
{'Inbox': self.__getinboxEvent},
{'Send a message': self.__sendmessageEvent},
{'Sent Messages': self.__getoutboxEvent}
]
}
]
def __getlistingsEvent(self):
print "hey"
def __getordersEvent(self):
print "hey"
def __getsettingsEvent(self):
print "hey"
def __getinboxEvent(self):
print "hey"
def __sendmessageEvent(self):
print "hey"
def __getoutboxEvent(self):
print "hey"
if __name__ == "__main__":
pyforms.startApp(WelcomeScreen)
|
13b602c50f3be62b2a3a8b267ba00b685fc0c7fe
|
python/ecep/portal/migrations/0011_auto_20160518_1211.py
|
python/ecep/portal/migrations/0011_auto_20160518_1211.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Visit a child-friendly location near you:</p><ul><li><strong>Loop</strong> 42 W. Madison Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Colman</strong> 4655 S. Dearborn Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Hall Mall</strong> 4638 W. Diversey Avenue Hours 8:00 AM - 5:00 PM</li></ul><p>All sites are open until 7:00 PM on Wednesdays!</p><p>Many people find it helpful to make a plan to visit. You can make your plan <a href="/static/files/enrollment-plan-cps.pdf">here</a>.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Chicago Public Schools early childhood school based preschool programs work to ensure children ages 3 and 4 years old, particularly those most in need, have access to high-quality programs. Schools are committed to creating an engaging, developmentally appropriate learning environment that supports and respects the unique potential of each individual child through best professional practices, parent engagement, and community involvement.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
|
Update data migration with new CPS description
|
Update data migration with new CPS description
|
Python
|
mit
|
smartchicago/chicago-early-learning,smartchicago/chicago-early-learning,smartchicago/chicago-early-learning,smartchicago/chicago-early-learning
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Visit a child-friendly location near you:</p><ul><li><strong>Loop</strong> 42 W. Madison Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Colman</strong> 4655 S. Dearborn Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Hall Mall</strong> 4638 W. Diversey Avenue Hours 8:00 AM - 5:00 PM</li></ul><p>All sites are open until 7:00 PM on Wednesdays!</p><p>Many people find it helpful to make a plan to visit. You can make your plan <a href="/static/files/enrollment-plan-cps.pdf">here</a>.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
Update data migration with new CPS description
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Chicago Public Schools early childhood school based preschool programs work to ensure children ages 3 and 4 years old, particularly those most in need, have access to high-quality programs. Schools are committed to creating an engaging, developmentally appropriate learning environment that supports and respects the unique potential of each individual child through best professional practices, parent engagement, and community involvement.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Visit a child-friendly location near you:</p><ul><li><strong>Loop</strong> 42 W. Madison Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Colman</strong> 4655 S. Dearborn Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Hall Mall</strong> 4638 W. Diversey Avenue Hours 8:00 AM - 5:00 PM</li></ul><p>All sites are open until 7:00 PM on Wednesdays!</p><p>Many people find it helpful to make a plan to visit. You can make your plan <a href="/static/files/enrollment-plan-cps.pdf">here</a>.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
<commit_msg>Update data migration with new CPS description<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Chicago Public Schools early childhood school based preschool programs work to ensure children ages 3 and 4 years old, particularly those most in need, have access to high-quality programs. Schools are committed to creating an engaging, developmentally appropriate learning environment that supports and respects the unique potential of each individual child through best professional practices, parent engagement, and community involvement.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Visit a child-friendly location near you:</p><ul><li><strong>Loop</strong> 42 W. Madison Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Colman</strong> 4655 S. Dearborn Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Hall Mall</strong> 4638 W. Diversey Avenue Hours 8:00 AM - 5:00 PM</li></ul><p>All sites are open until 7:00 PM on Wednesdays!</p><p>Many people find it helpful to make a plan to visit. You can make your plan <a href="/static/files/enrollment-plan-cps.pdf">here</a>.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
Update data migration with new CPS description# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Chicago Public Schools early childhood school based preschool programs work to ensure children ages 3 and 4 years old, particularly those most in need, have access to high-quality programs. Schools are committed to creating an engaging, developmentally appropriate learning environment that supports and respects the unique potential of each individual child through best professional practices, parent engagement, and community involvement.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Visit a child-friendly location near you:</p><ul><li><strong>Loop</strong> 42 W. Madison Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Colman</strong> 4655 S. Dearborn Street Hours: 9:00 AM - 5:00 PM</li><li><strong>Hall Mall</strong> 4638 W. Diversey Avenue Hours 8:00 AM - 5:00 PM</li></ul><p>All sites are open until 7:00 PM on Wednesdays!</p><p>Many people find it helpful to make a plan to visit. You can make your plan <a href="/static/files/enrollment-plan-cps.pdf">here</a>.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
<commit_msg>Update data migration with new CPS description<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_enrollment_info(apps, schema_editor):
"""
Populate the Enrollment info based on static text
"""
Location = apps.get_model('portal', 'Location')
for loc in Location.objects.all():
if loc.is_cps_based:
loc.enrollment_en = """<p>Chicago Public Schools early childhood school based preschool programs work to ensure children ages 3 and 4 years old, particularly those most in need, have access to high-quality programs. Schools are committed to creating an engaging, developmentally appropriate learning environment that supports and respects the unique potential of each individual child through best professional practices, parent engagement, and community involvement.</p>"""
loc.save()
class Migration(migrations.Migration):
dependencies = [
('portal', '0010_auto_20160518_1210'),
]
operations = [
migrations.RunPython(populate_enrollment_info),
]
|
b286e03f96cce8518dd60b74ff8dac6d7b7c5a97
|
octohatrack/helpers.py
|
octohatrack/helpers.py
|
#!/usr/bin/env python
import sys
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=lambda k: k['name'].lower()):
if user["name"] not in seen:
seen.append(user["name"])
if user["name"] != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
|
#!/usr/bin/env python
import sys
def _sort_by_name(contributor):
if contributor.get('name'):
return contributor['name'].lower()
return contributor['user_name']
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=_sort_by_name):
if user.get('name'):
key = user['name']
else:
key = user['user_name']
if key not in seen:
seen.append(key)
if key != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
|
Support users with no name
|
display_results: Support users with no name
A user without a name causes an exception, and
the dedup algorithm also merged users without
a name as all of their names were `None`.
Fixes https://github.com/LABHR/octohatrack/issues/103
|
Python
|
bsd-3-clause
|
LABHR/octohatrack,glasnt/octohat
|
#!/usr/bin/env python
import sys
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=lambda k: k['name'].lower()):
if user["name"] not in seen:
seen.append(user["name"])
if user["name"] != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
display_results: Support users with no name
A user without a name causes an exception, and
the dedup algorithm also merged users without
a name as all of their names were `None`.
Fixes https://github.com/LABHR/octohatrack/issues/103
|
#!/usr/bin/env python
import sys
def _sort_by_name(contributor):
if contributor.get('name'):
return contributor['name'].lower()
return contributor['user_name']
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=_sort_by_name):
if user.get('name'):
key = user['name']
else:
key = user['user_name']
if key not in seen:
seen.append(key)
if key != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
|
<commit_before>#!/usr/bin/env python
import sys
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=lambda k: k['name'].lower()):
if user["name"] not in seen:
seen.append(user["name"])
if user["name"] != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
<commit_msg>display_results: Support users with no name
A user without a name causes an exception, and
the dedup algorithm also merged users without
a name as all of their names were `None`.
Fixes https://github.com/LABHR/octohatrack/issues/103<commit_after>
|
#!/usr/bin/env python
import sys
def _sort_by_name(contributor):
if contributor.get('name'):
return contributor['name'].lower()
return contributor['user_name']
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=_sort_by_name):
if user.get('name'):
key = user['name']
else:
key = user['user_name']
if key not in seen:
seen.append(key)
if key != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
|
#!/usr/bin/env python
import sys
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=lambda k: k['name'].lower()):
if user["name"] not in seen:
seen.append(user["name"])
if user["name"] != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
display_results: Support users with no name
A user without a name causes an exception, and
the dedup algorithm also merged users without
a name as all of their names were `None`.
Fixes https://github.com/LABHR/octohatrack/issues/103#!/usr/bin/env python
import sys
def _sort_by_name(contributor):
if contributor.get('name'):
return contributor['name'].lower()
return contributor['user_name']
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=_sort_by_name):
if user.get('name'):
key = user['name']
else:
key = user['user_name']
if key not in seen:
seen.append(key)
if key != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
|
<commit_before>#!/usr/bin/env python
import sys
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=lambda k: k['name'].lower()):
if user["name"] not in seen:
seen.append(user["name"])
if user["name"] != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
<commit_msg>display_results: Support users with no name
A user without a name causes an exception, and
the dedup algorithm also merged users without
a name as all of their names were `None`.
Fixes https://github.com/LABHR/octohatrack/issues/103<commit_after>#!/usr/bin/env python
import sys
def _sort_by_name(contributor):
if contributor.get('name'):
return contributor['name'].lower()
return contributor['user_name']
def display_results(repo_name, contributors, api_len):
"""
Fancy display.
"""
print("\n")
print("All Contributors:")
# Sort and consolidate on Name
seen = []
for user in sorted(contributors, key=_sort_by_name):
if user.get('name'):
key = user['name']
else:
key = user['user_name']
if key not in seen:
seen.append(key)
if key != user["user_name"]:
print("%s (%s)" % (user["name"], user['user_name']))
else:
print(user["user_name"])
print("")
print("Repo: %s" % repo_name)
print("GitHub Contributors: %s" % api_len)
print("All Contributors: %s 👏" % len(seen))
def progress():
"""
Append an dot
"""
sys.stdout.write(".")
sys.stdout.flush()
def progress_message(message):
sys.stdout.write("\n")
sys.stdout.write("%s..." % message)
sys.stdout.flush()
|
866f95cfb0db14da0596efe41a128baf2a3a1cfe
|
django_basic_tinymce_flatpages/admin.py
|
django_basic_tinymce_flatpages/admin.py
|
from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
|
from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
fields = '__all__'
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
|
Fix form PageForm needs updating.
|
Fix form PageForm needs updating.
|
Python
|
bsd-3-clause
|
ad-m/django-basic-tinymce-flatpages
|
from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
Fix form PageForm needs updating.
|
from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
fields = '__all__'
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
|
<commit_before>from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
<commit_msg>Fix form PageForm needs updating.<commit_after>
|
from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
fields = '__all__'
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
|
from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
Fix form PageForm needs updating.from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
fields = '__all__'
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
|
<commit_before>from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
<commit_msg>Fix form PageForm needs updating.<commit_after>from django.conf import settings
from django.contrib import admin
from django.contrib.flatpages.admin import FlatpageForm, FlatPageAdmin
from django.contrib.flatpages.models import FlatPage
from django.utils.module_loading import import_string
FLATPAGE_WIDGET = getattr(settings, 'FLATPAGE_WIDGET', 'tinymce.widgets.TinyMCE')
FLATPAGE_WIDGET_KWARGS = getattr(settings, 'FLATPAGE_WIDGET_KWARGS',
{'attrs': {'cols': 100, 'rows': 15}})
class PageForm(FlatpageForm):
class Meta:
model = FlatPage
widgets = {
'content': import_string(FLATPAGE_WIDGET)(**FLATPAGE_WIDGET_KWARGS),
}
fields = '__all__'
class PageAdmin(FlatPageAdmin):
"""
Page Admin
"""
form = PageForm
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, PageAdmin)
|
d1008437dcf618700bce53913f3450aceda8a23f
|
djangoautoconf/auto_conf_admin_utils.py
|
djangoautoconf/auto_conf_admin_utils.py
|
from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)
|
from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)
|
Remove xadmin as it will not work with guardian.
|
Remove xadmin as it will not work with guardian.
|
Python
|
bsd-3-clause
|
weijia/djangoautoconf,weijia/djangoautoconf
|
from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)Remove xadmin as it will not work with guardian.
|
from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)
|
<commit_before>from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)<commit_msg>Remove xadmin as it will not work with guardian.<commit_after>
|
from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)
|
from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)Remove xadmin as it will not work with guardian.from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)
|
<commit_before>from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)<commit_msg>Remove xadmin as it will not work with guardian.<commit_after>from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)
|
7bfa9d24f7af811746bbb0336b5e75a592cff186
|
aws_eis/lib/checks.py
|
aws_eis/lib/checks.py
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = get_version(endpoint)
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
else:
print(json.loads(msg)['Message'])
print('Status: {}'.format(status_code))
sys.exit(1)
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
try:
es_version = get_version(endpoint)
except KeyError:
print('Status: {}'.format(r.status_code))
sys.exit(1)
else:
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
|
Fix KeyError: 'version' due to 403 Forbidden error
|
Fix KeyError: 'version' due to 403 Forbidden error
|
Python
|
mit
|
jpdoria/aws_eis
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = get_version(endpoint)
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
else:
print(json.loads(msg)['Message'])
print('Status: {}'.format(status_code))
sys.exit(1)
Fix KeyError: 'version' due to 403 Forbidden error
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
try:
es_version = get_version(endpoint)
except KeyError:
print('Status: {}'.format(r.status_code))
sys.exit(1)
else:
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
|
<commit_before>import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = get_version(endpoint)
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
else:
print(json.loads(msg)['Message'])
print('Status: {}'.format(status_code))
sys.exit(1)
<commit_msg>Fix KeyError: 'version' due to 403 Forbidden error<commit_after>
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
try:
es_version = get_version(endpoint)
except KeyError:
print('Status: {}'.format(r.status_code))
sys.exit(1)
else:
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
|
import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = get_version(endpoint)
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
else:
print(json.loads(msg)['Message'])
print('Status: {}'.format(status_code))
sys.exit(1)
Fix KeyError: 'version' due to 403 Forbidden errorimport json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
try:
es_version = get_version(endpoint)
except KeyError:
print('Status: {}'.format(r.status_code))
sys.exit(1)
else:
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
|
<commit_before>import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = get_version(endpoint)
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
else:
print(json.loads(msg)['Message'])
print('Status: {}'.format(status_code))
sys.exit(1)
<commit_msg>Fix KeyError: 'version' due to 403 Forbidden error<commit_after>import json
import sys
import requests
def py_version():
if sys.version_info < (3, 0, 0):
print(sys.version)
print('You must use Python 3.x to run this application.')
sys.exit(1)
def get_version(endpoint):
r = requests.get('https://{}'.format(endpoint))
es_version = json.loads(r.text)['version']['number']
return es_version
def test_con(endpoint):
r = requests.get('https://{}'.format(endpoint))
try:
es_version = get_version(endpoint)
except KeyError:
print('Status: {}'.format(r.status_code))
sys.exit(1)
else:
if r.status_code == 200:
print('ESVersion: {}'.format(es_version))
print('Connection: OK')
print('Status: {}\n'.format(r.status_code))
|
cd006f8d3885005e867255e63819fc8a5c7430bf
|
redactor/TextEditor.py
|
redactor/TextEditor.py
|
from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_panel(self):
return self.text_panel
|
from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_widget(self):
return self.editor
def get_text_panel(self):
return self.text_panel
|
Add getter for text widget
|
Add getter for text widget
|
Python
|
mit
|
BrickText/BrickText
|
from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_panel(self):
return self.text_panel
Add getter for text widget
|
from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_widget(self):
return self.editor
def get_text_panel(self):
return self.text_panel
|
<commit_before>from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_panel(self):
return self.text_panel
<commit_msg>Add getter for text widget<commit_after>
|
from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_widget(self):
return self.editor
def get_text_panel(self):
return self.text_panel
|
from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_panel(self):
return self.text_panel
Add getter for text widgetfrom tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_widget(self):
return self.editor
def get_text_panel(self):
return self.text_panel
|
<commit_before>from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_panel(self):
return self.text_panel
<commit_msg>Add getter for text widget<commit_after>from tkinter import *
class TextEditor():
def __init__(self):
self.root = Tk()
self.root.wm_title("BrickText")
self.text_panel = Text(self.root)
self.text_panel.pack(fill=BOTH, expand=YES)
def start(self):
self.root.mainloop()
def get_root(self):
return self.root
def get_text_widget(self):
return self.editor
def get_text_panel(self):
return self.text_panel
|
7c02a79a5eb2dd6b9b49b2eefbdde1064a73de17
|
redpanal/core/forms.py
|
redpanal/core/forms.py
|
from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(e.msg)
|
from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(str(e))
|
Fix exception handling in TagField
|
Fix exception handling in TagField
|
Python
|
agpl-3.0
|
RedPanal/redpanal,RedPanal/redpanal,RedPanal/redpanal
|
from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(e.msg)
Fix exception handling in TagField
|
from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(str(e))
|
<commit_before>from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(e.msg)
<commit_msg>Fix exception handling in TagField<commit_after>
|
from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(str(e))
|
from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(e.msg)
Fix exception handling in TagFieldfrom django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(str(e))
|
<commit_before>from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(e.msg)
<commit_msg>Fix exception handling in TagField<commit_after>from django import forms
from django.utils.translation import ugettext as _
from taggit.utils import parse_tags, edit_string_for_tags
class TagParseError(Exception):
pass
def tags_to_editable_string(tags):
return u' '.join([u"#%s" % t for t in tags])
def parse_tags(string):
tags = string.split()
for tag in tags:
if not tag.startswith('#'):
raise TagParseError(_("Tag '%s' does not start with #" % tag))
return [tag[1:] for tag in tags if len(tag) > 1]
class TagWidget(forms.TextInput):
def render(self, name, value, attrs=None, renderer=None):
if value is not None and not isinstance(value, str):
value = tags_to_editable_string([o.tag for o in value.select_related("tag")])
return super(TagWidget, self).render(name, value, attrs)
class TagField(forms.CharField):
widget = TagWidget
help_text = "asdasd"
def clean(self, value):
value = super(TagField, self).clean(value)
try:
return parse_tags(value)
except TagParseError as e:
raise forms.ValidationError(str(e))
|
ae11251f7669e4ddde6f0491ff1fe0afdfd54a7a
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
language = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
syntax = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
|
Change 'language' to 'syntax', that is more precise terminology.
|
Change 'language' to 'syntax', that is more precise terminology.
|
Python
|
mit
|
SublimeLinter/SublimeLinter-jsl
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
language = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
Change 'language' to 'syntax', that is more precise terminology.
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
syntax = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
language = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
<commit_msg>Change 'language' to 'syntax', that is more precise terminology.<commit_after>
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
syntax = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
language = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
Change 'language' to 'syntax', that is more precise terminology.#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
syntax = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
|
<commit_before>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
language = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
<commit_msg>Change 'language' to 'syntax', that is more precise terminology.<commit_after>#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Aparajita Fishman
# Copyright (c) 2013 Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter-contrib-jsl
# License: MIT
#
"""This module exports the JSL plugin linter class."""
from SublimeLinter.lint import Linter
class JSL(Linter):
"""Provides an interface to the jsl executable."""
syntax = ('javascript', 'html')
cmd = 'jsl -stdin -nologo -nosummary'
regex = r'''(?xi)
# First line is (lineno): type: error message
^\((?P<line>\d+)\):.*?(?:(?P<warning>warning)|(?P<error>error)):\s*(?P<message>.+)$\r?\n
# Second line is the line of code
^.*$\r?\n
# Third line is a caret pointing to the position of the error
^(?P<col>[^\^]*)\^$
'''
multiline = True
defaults = {
'-conf:': None
}
selectors = {
'html': 'source.js.embedded.html'
}
|
8b889c10abf043f6612409973458e8a0f0ed952e
|
bonus_level.py
|
bonus_level.py
|
#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
|
#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
log_counter = 0xFF
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
|
Initialize the log counter to 0xFF
|
Initialize the log counter to 0xFF
|
Python
|
mit
|
japesinator/Bad-Crypto,japesinator/Bad-Crypto
|
#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
Initialize the log counter to 0xFF
|
#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
log_counter = 0xFF
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
|
<commit_before>#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
<commit_msg>Initialize the log counter to 0xFF<commit_after>
|
#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
log_counter = 0xFF
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
|
#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
Initialize the log counter to 0xFF#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
log_counter = 0xFF
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
|
<commit_before>#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
<commit_msg>Initialize the log counter to 0xFF<commit_after>#!/usr/bin/env python
from time import sleep
# Usage: ./bonus_level.py, then input your plaintext enclosed by quotes
# Note: I haven't made a ciphertext for this because the attack on it depends
# a lot on the machine it was implemented on
secret = input("Please enter your plaintext: ")
for char in secret:
log_counter = 0xFF
for i in range(ord(char)):
sleep(0.01)
log_counter ^= i
print log_counter
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.