Dataset schema (column name, reported string-length range or class count):

| column | type |
|---|---|
| commit | string (40 chars) |
| old_file | string (4–118 chars) |
| new_file | string (4–118 chars) |
| old_contents | string (0–2.94k chars) |
| new_contents | string (1–4.43k chars) |
| subject | string (15–444 chars) |
| message | string (16–3.45k chars) |
| lang | string (1 class) |
| license | string (13 classes) |
| repos | string (5–43.2k chars) |
| prompt | string (17–4.58k chars) |
| response | string (1–4.43k chars) |
| prompt_tagged | string (58–4.62k chars) |
| response_tagged | string (1–4.43k chars) |
| text | string (132–7.29k chars) |
| text_tagged | string (173–7.33k chars) |
---
**Commit** `66f2f1caf8b9f7e5325111a995b993d61df9aed1` · **File** `raft/store.py` · **Language** Python · **License** unlicense · **Repo** kurin/py-raft

**Message:** Save and load persistent state.

```python
import errno
import uuid

import msgpack  # we're using it anyway...


def read_state():
    sfile = '/tmp/raft-state'
    try:
        # binary mode: msgpack deals in bytes, not text
        with open(sfile, 'rb') as r:
            return msgpack.unpackb(r.read())
    except IOError as e:
        if not e.errno == errno.ENOENT:
            raise
    # no state file exists; initialize with fresh values
    return 0, None, [], [], uuid.uuid4().hex


def write_state(term, voted, log, peers, uuid):
    # note: the uuid parameter shadows the uuid module within this function
    sfile = '/tmp/raft-state'
    with open(sfile, 'wb') as w:
        w.write(msgpack.packb((term, voted, log, peers, uuid)))
```
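The write path above rewrites the live file in place, so a crash mid-write can corrupt the very state Raft must keep durable. A minimal hardening sketch (an assumption on my part, not part of this commit) writes to a temporary file, syncs it, and renames atomically:

```python
import os

import msgpack


def write_state_atomic(term, voted, log, peers, node_id, path='/tmp/raft-state'):
    """Sketch: temp file + fsync + atomic rename; readers never see a partial file."""
    tmp = path + '.tmp'
    with open(tmp, 'wb') as w:
        w.write(msgpack.packb((term, voted, log, peers, node_id)))
        w.flush()
        os.fsync(w.fileno())  # make sure the bytes are on disk before the rename
    os.replace(tmp, path)     # atomic on POSIX filesystems
```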
---
**Commit** `1049ee1d79d5c4aa80f44011d0ee21df1226c5fc` · **File** `tests/test_identifying_headers.py` · **Language** Python · **License** bsd-3-clause · **Repo** jdavisp3/TigerShark

**Message:** Add 5010 generic ISA facade tests.

```python
import datetime
import os
import unittest

from tigershark.facade.common import IdentifyingHeaders
from tigershark.parsers import IdentifyingParser


class TestIdentifyingHeaders(unittest.TestCase):

    def parse_file(self, name):
        with open(os.path.join('tests', name)) as f:
            parsed = IdentifyingParser.unmarshall(
                f.read().strip(), ignoreExtra=True)
        return IdentifyingHeaders(parsed)

    def test_5010_details(self):
        facade = self.parse_file('5010-835-example-1.txt')
        control = facade.facades[0].interchange_control
        self.assertEqual(control.authorization_information_qualifier, '00')
        self.assertEqual(control.authorization_information, ' ')
        self.assertEqual(control.security_information_qualifier, '00')
        self.assertEqual(control.security_information, ' ')
        self.assertEqual(control.interchange_sender_id_qualifier, 'ZZ')
        self.assertEqual(control.interchange_sender_id, '5010TEST ')
        self.assertEqual(control.interchange_receiver_id_qualifier, 'ZZ')
        self.assertEqual(control.interchange_receiver_id, '835RECVR ')
        self.assertEqual(control.interchange_date, datetime.date(2011, 9, 30))
        self.assertEqual(control.interchange_time, datetime.time(11, 5))
        self.assertEqual(control.interchange_control_standards_id, '^')
        self.assertEqual(control.interchange_control_version_number, '00501')
        self.assertEqual(control.interchange_control_number, '000004592')
        self.assertEqual(control.acknowledgement_requested, '0')
        self.assertEqual(control.test_indicator, 'T')
        self.assertEqual(control.subelement_separator, '|')
```
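For orientation, these assertions map element-by-element onto the ISA interchange header of the fixture file. A hypothetical reconstruction of that segment (my own guess from the asserted values, with fields padded per the X12 spec; the actual fixture is `tests/5010-835-example-1.txt`):

```python
# Hypothetical reconstruction of the fixture's ISA segment, not the real file:
isa = ("ISA*00*          *00*          *ZZ*5010TEST       *ZZ*835RECVR       "
       "*110930*1105*^*00501*000004592*0*T*|~")
```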
---
**Commit** `6599f8ab3bc99e5d2cdb222856b725972c4b4d16` · **File** `scripts/anonscrobbles.py` · **Language** Python · **License** agpl-3.0 · **Repo** foocorp/gnu-fm

**Message:** Add hacky script for anonymising dumps of the Scrobbles table whilst still maintaining internal consistency

```python
#!/usr/bin/env python
import random

s = open("scrobbledump.sql", "r")
o = open("scrobbles.anonymous.sql", "w")
datasection = False
usermap = {}
used_ids = set()
# track, artist, "time", mbid, album, source, rating, length, stid, userid, track_tsv, artist_tsv
for line in s.readlines():
    if line.rstrip() == "\\.":  # end of a COPY data section
        datasection = False
    if datasection:
        data = line.split("\t")
        uid = data[9]
        if uid in usermap:
            data[9] = str(usermap[uid])
        else:
            newid = random.randint(0, 1000000)
            # retry until we get an id that hasn't already been handed out
            while newid in used_ids:
                newid = random.randint(0, 1000000)
            usermap[uid] = newid
            used_ids.add(newid)
            data[9] = str(newid)
        o.write("\t".join(data))
    else:
        o.write(line)
        if line[:4] == "COPY":
            datasection = True
s.close()
o.close()
```
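Because the replacement ids are random, the mapping changes on every run and must be held in memory for the whole dump. A keyed-hash variant (a sketch under my own assumptions, not something this commit does) gives stable pseudonyms across dumps without the in-memory map:

```python
import hashlib
import hmac


def pseudonym(uid, key=b"dump-secret"):
    """Derive a stable anonymous user id from the original id and a secret key."""
    digest = hmac.new(key, uid.encode(), hashlib.sha256).digest()
    return int.from_bytes(digest[:4], "big") % 1000000
```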
---
**Commit** `44da0c97ac662375dd45d201cce88c32896ca361` · **File** `scripts/list-checkins.py` · **Language** Python · **License** mit · **Repo** DavidWittman/serverless-southwest-check-in

**Message:** Add script to list scheduled checkins

```python
#!/usr/bin/env python
# This script retrieves a detailed list of the currently running checkins
import argparse
import concurrent.futures
import datetime
import json
import sys

import boto3

SFN = boto3.client('stepfunctions')


def format_date_fields(obj):
    for key in obj:
        if isinstance(obj[key], datetime.datetime):
            obj[key] = obj[key].isoformat()
    return obj


def get_execution_details(execution_arn):
    e = SFN.describe_execution(executionArn=execution_arn)
    e = format_date_fields(e)
    del e['ResponseMetadata']
    return e


def main(args):
    results = []
    state_machine_arn = args.state_machine_arn
    # TODO(dw): pagination for > 100 executions
    executions = SFN.list_executions(
        stateMachineArn=state_machine_arn,
        statusFilter='RUNNING'
    )
    with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
        futures = []
        for e in executions['executions']:
            future = executor.submit(get_execution_details, e['executionArn'])
            futures.append(future)
        for future in concurrent.futures.as_completed(futures):
            results.append(future.result())
    print(json.dumps(results))


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--state-machine-arn', required=True)
    args = parser.parse_args()
    main(args)
```
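The TODO about pagination matters because `list_executions` returns at most 100 results per call. One way to resolve it (a sketch leaning on boto3's standard paginator for this API) is:

```python
def iter_running_executions(state_machine_arn):
    """Yield every RUNNING execution, transparently following pagination."""
    paginator = SFN.get_paginator('list_executions')
    pages = paginator.paginate(stateMachineArn=state_machine_arn,
                               statusFilter='RUNNING')
    for page in pages:
        for execution in page['executions']:
            yield execution
```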
---
**Commit** `bda3fa5dfea480f8db5393602616d6a4b430175e` · **File** `scripts/renumber_ants.py` · **Language** Python · **License** bsd-2-clause · **Repo** HERA-Team/pyuvdata

**Message:** Add a script to renumber antennas to below 256 in uvfits files

```python
#!/usr/bin/env python
"""
A command-line script for renumbering antenna numbers > 255 if possible in uvfits files.
"""
import argparse

import numpy as np
from pyuvdata import UVData

# set up argparse
a = argparse.ArgumentParser(description="A command-line script for renumbering "
                            "antenna numbers > 255 if possible in uvfits files.")
a.add_argument("file_in", type=str, help="input uvfits file.")
a.add_argument("file_out", type=str, help="output uvfits file.")
a.add_argument("--overwrite", default=False, action='store_true',
               help="overwrite output file if it already exists.")
a.add_argument("--verbose", default=False, action='store_true',
               help="report feedback to stdout.")

# get args
args = a.parse_args()

uv_obj = UVData()
uv_obj.read_uvfits(args.file_in)

large_ant_nums = uv_obj.antenna_numbers[np.where(uv_obj.antenna_numbers > 255)[0]]
new_nums = sorted(list(set(range(256)) - set(uv_obj.antenna_numbers)))
if len(new_nums) < len(large_ant_nums):
    raise ValueError('too many antennas in dataset, cannot renumber all below 255')
new_nums = new_nums[-1 * len(large_ant_nums):]
renumber_dict = dict(zip(large_ant_nums, new_nums))

for ant_in, ant_out in renumber_dict.items():
    if args.verbose:
        print("renumbering {a1} to {a2}".format(a1=ant_in, a2=ant_out))
    wh_ant_num = np.where(uv_obj.antenna_numbers == ant_in)[0]
    wh_ant1_arr = np.where(uv_obj.ant_1_array == ant_in)[0]
    wh_ant2_arr = np.where(uv_obj.ant_2_array == ant_in)[0]
    uv_obj.antenna_numbers[wh_ant_num] = ant_out
    uv_obj.ant_1_array[wh_ant1_arr] = ant_out
    uv_obj.ant_2_array[wh_ant2_arr] = ant_out
    blt_inds = np.array(sorted(list(set(wh_ant1_arr.tolist() + wh_ant2_arr.tolist()))))
    uv_obj.baseline_array[blt_inds] = uv_obj.antnums_to_baseline(
        uv_obj.ant_1_array[blt_inds], uv_obj.ant_2_array[blt_inds])

uv_obj.check()
uv_obj.write_uvfits(args.file_out)
```
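The 255 cutoff comes from the classic UVFITS/AIPS baseline convention, which packs both antenna numbers into a single baseline value and only has room for numbers below 256. A sketch of that packing (one common 1-indexed form; pyuvdata's `antnums_to_baseline` handles the details, including an extended scheme for larger arrays):

```python
def antnums_to_baseline_256(ant1, ant2):
    """Classic AIPS-style packing; only valid while both numbers fit in a byte."""
    assert ant1 < 256 and ant2 < 256
    return 256 * (ant1 + 1) + (ant2 + 1)
```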
---
**Commit** `fcb9d55dd5bdaae2ec088b576f9ee78ac81a4b1a` · **File** `tests/fail_arguments_test.py` · **Language** Python · **License** mit · **Repo** karlch/vimiv

**Message:** Add test for failing commands because of n_args

```python
#!/usr/bin/env python
# encoding: utf-8
"""Fail commands because of arguments test for vimiv's test suite."""

from unittest import main

from vimiv_testcase import VimivTestCase


class FailingArgTest(VimivTestCase):
    """Failing Argument Tests."""

    @classmethod
    def setUpClass(cls):
        cls.init_test(cls)
        cls.cmdline = cls.vimiv["commandline"]
        cls.entry = cls.cmdline.entry

    def test_args(self):
        """Fail commands because of wrong number of arguments."""
        # 0 arguments allowed
        for cmd in ["accept_changes", "autorotate", "center", "clear_thumbs",
                    "clear_trash", "copy_abspath", "copy_basename", "delete",
                    "first", "first_lib", "fit", "fit_horiz", "fit_vert",
                    "focus_library", "fullscreen", "last", "last_lib",
                    "library", "manipulate", "mark", "mark_all", "mark_between",
                    "move_up", "next", "next!", "prev", "prev!", "q", "q!",
                    "reload_lib", "set animation!", "set clipboard!",
                    "set overzoom!", "set rescale_svg!", "set show_hidden!",
                    "set statusbar!", "slideshow", "thumbnail",
                    "unfocus_library", "version"]:
            self.fail_arguments(cmd, 1, too_many=True)
        # 1 argument optional
        for cmd in ["grow_lib", "set brightness", "set contrast",
                    "set library_width", "set sharpness", "set slideshow_delay",
                    "shrink_lib", "zoom_in", "zoom_out"]:
            self.fail_arguments(cmd, 2, too_many=True)
        # 1 argument required
        for cmd in ["flip", "format", "rotate", "slideshow_delay", "tag_write",
                    "tag_load", "tag_remove", "zoom_to"]:
            self.fail_arguments(cmd, 2, too_many=True)
            self.fail_arguments(cmd, 0, too_many=False)
        # 2 arguments required
        for cmd in ["alias"]:
            self.fail_arguments(cmd, 3, too_many=True)
            self.fail_arguments(cmd, 1, too_many=False)

    def fail_arguments(self, command, n_args, too_many=True):
        """Fail a command because of too many or too few arguments.

        Check for the correct error message.

        args:
            command: Command to fail.
            n_args: Amount of arguments to try.
            too_many: If True, n_args are too many for command. Otherwise too
                few.
        """
        text = ":" + command + " arg" * n_args
        self.entry.set_text(text)
        self.cmdline.handler(self.entry)
        expected = "ERROR: Too many arguments for command" \
            if too_many \
            else "ERROR: Missing positional arguments for command"
        self.assertIn(expected, self.vimiv["statusbar"].left_label.get_text())


if __name__ == '__main__':
    main()
```
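For a concrete sense of what the helper feeds to the command line: `fail_arguments("flip", 2)` builds and submits the text below, then checks the status bar for the "too many arguments" error, while `fail_arguments("flip", 0)` submits the bare `:flip` and expects the "missing positional arguments" error.

```python
>>> command, n_args = "flip", 2
>>> ":" + command + " arg" * n_args
':flip arg arg'
```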
---
**Commit** `99bf15563d495f5772d82dae4bf6511b88c2c45b` · **File** `Algebra_Geometry/linear_algebra.py` · **Language** Python · **License** mit · **Repo** nachovizzo/AUTONAVx

**Message:** Add Linear Algebra python Example

```python
#!/usr/bin/python
# Linear Algebra with Python
import numpy as np


def numpy_show():
    # create a column vector
    col_vec = np.array([[1], [2]])
    print("column vector")
    print(col_vec)
    # create a row vector
    row_vec = np.array([[1, 2]])
    print("row vector")
    print(row_vec)
    # create a matrix
    mat = np.array([[1, 2], [3, 4]])
    print("matrix")
    print(mat)
    # inspect dimensions
    print("row vector dimensions", row_vec.ndim)
    shape = row_vec.shape
    print("row vector rows", shape[0], "columns", shape[1])
    print("matrix dimensions", mat.ndim)
    shape = mat.shape
    print("matrix rows", shape[0], "columns", shape[1])
    # transpose
    vec_t = row_vec.transpose()  # or row_vec.T
    print("transposed vector")
    print(vec_t)
    mat_t = mat.transpose()  # or mat.T
    print("transposed matrix")
    print(mat_t)
    a = np.array([[2], [-4], [1]])
    b = np.array([[2], [1], [-2]])
    # addition
    print("a + b")
    print(a + b)
    # subtraction
    print("a - b")
    print(a - b)
    # scalar multiplication
    print("1.2 * a")
    print(1.2 * a)
    # element-wise multiplication
    print("a * b")
    print(a * b)
    # vector scalar product
    print("a . b")
    print(np.dot(a.transpose(), b))
    # vector cross product
    print("a x b")
    print(np.cross(a, b, axis=0))  # or np.cross(a.T, b.T).T
    identity = np.array([[1, 0], [0, 1]])
    # matrix vector product
    print("identity . col_vec")
    print(np.dot(identity, col_vec))
    # matrix product
    print("identity . mat")
    print(np.dot(identity, mat))


numpy_show()
```
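A quick hand check of the two products the script prints for a = (2, -4, 1) and b = (2, 1, -2), runnable as-is:

```python
import numpy as np

a = np.array([2, -4, 1])
b = np.array([2, 1, -2])
assert np.dot(a, b) == -2                          # 2*2 + (-4)*1 + 1*(-2)
assert np.array_equal(np.cross(a, b), [7, 6, 10])  # ((-4)*(-2)-1*1, 1*2-2*(-2), 2*1-(-4)*2)
```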
---
**Commit** `7f22d9687563660852926333a9c8bacaa31df591` · **File** `open_humans/management/commands/stats_new.py` · **Language** Python · **License** mit · **Repos** PersonalGenomesOrg/open-humans, OpenHumans/open-humans

**Message:** Add new stats management command

```python
# -*- coding: utf-8 -*-
import arrow

from django.contrib.auth import get_user_model
from django.core.management.base import BaseCommand
from django.db.models import Count
from termcolor import colored

from data_import.models import DataFile
from open_humans.models import Member
from private_sharing.models import DataRequestProject, DataRequestProjectMember

UserModel = get_user_model()


class Command(BaseCommand):
    help = 'Statistics on the site'
    args = ''

    def add_arguments(self, parser):
        parser.add_argument('--date', type=str,
                            help='stats as of this date')

    def handle(self, *args, **options):
        cutoff = arrow.get(options['date']).datetime
        members = Member.objects.filter(user__date_joined__lte=cutoff)
        members_with_data = members.annotate(
            datafiles_count=Count('user__datafiles')).filter(
                datafiles_count__gte=1)
        files = DataFile.objects.exclude(archived__lte=cutoff).filter(
            created__lte=cutoff)
        projects_made = DataRequestProject.objects.filter(created__lte=cutoff)
        projects_approved = projects_made.filter(approved=True)
        data_connections = set([(
            df['source'], df['user__username']) for
            df in DataFile.objects.exclude(
                archived__lte=cutoff).filter(
                    created__lte=cutoff).values('user__username', 'source')])
        proj_connections = DataRequestProjectMember.objects.exclude(
            project__approved=False).exclude(joined=False).exclude(
                authorized=False).filter(created__lte=cutoff)
        print("Members: {}".format(members.count()))
        print("Members with any data connections: {}".format(
            members_with_data.count()))
        print("Data connections: {}".format(len(data_connections)))
        print("Project connections: {}".format(proj_connections.count()))
        print("Projects drafted: {}".format(projects_made.count()))
        print("Projects approved: {}".format(projects_approved.count()))
```
print("Projects drafted: {}".format(projects_made.count()))
print("Projects approved: {}".format(projects_approved.count()))
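
A note on the record above: the `files` queryset and the `colored` import are defined but never used in `handle`, and this row omits the command's file path, so the command name itself is not recorded. Below is a minimal sketch of driving the command from Python, assuming it was saved under the conventional management/commands layout with the name "stats" (an assumption, not something the record confirms):

from django.core.management import call_command

# arrow.get() accepts ISO-8601 strings, so any "YYYY-MM-DD" value works here.
call_command("stats", date="2017-06-01")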
|
|
8a1ba0d7200ea72e20ab1db5a62c3e36ce7f8489
|
zephyr/management/commands/print_initial_password.py
|
zephyr/management/commands/print_initial_password.py
|
from django.core.management.base import BaseCommand
from zephyr.lib.initial_password import initial_password
class Command(BaseCommand):
help = "Print the initial password for accounts as created by populate_db"
def handle(self, *args, **options):
print
for email in args:
if '@' not in email:
print 'ERROR: %s does not look like an email address' % (email,)
continue
print '%-30s %-16s' % (email, initial_password(email))
|
Add a management command to print the initial password for an account
|
Add a management command to print the initial password for an account
(imported from commit 0a2b7d8215961801dbd24d9af89785e857b9ba14)
|
Python
|
apache-2.0
|
souravbadami/zulip,tbutter/zulip,PaulPetring/zulip,he15his/zulip,avastu/zulip,littledogboy/zulip,glovebx/zulip,hafeez3000/zulip,DazWorrall/zulip,littledogboy/zulip,tdr130/zulip,zwily/zulip,mdavid/zulip,MayB/zulip,tommyip/zulip,ashwinirudrappa/zulip,niftynei/zulip,jackrzhang/zulip,MariaFaBella85/zulip,brockwhittaker/zulip,dhcrzf/zulip,levixie/zulip,LeeRisk/zulip,hj3938/zulip,Suninus/zulip,ashwinirudrappa/zulip,tommyip/zulip,gkotian/zulip,rishig/zulip,niftynei/zulip,kou/zulip,jphilipsen05/zulip,Diptanshu8/zulip,tommyip/zulip,vakila/zulip,itnihao/zulip,willingc/zulip,peguin40/zulip,voidException/zulip,PaulPetring/zulip,suxinde2009/zulip,gigawhitlocks/zulip,hayderimran7/zulip,Qgap/zulip,developerfm/zulip,he15his/zulip,LeeRisk/zulip,alliejones/zulip,proliming/zulip,deer-hope/zulip,zhaoweigg/zulip,natanovia/zulip,KJin99/zulip,codeKonami/zulip,jessedhillon/zulip,krtkmj/zulip,jphilipsen05/zulip,gkotian/zulip,developerfm/zulip,praveenaki/zulip,eastlhu/zulip,Juanvulcano/zulip,verma-varsha/zulip,MayB/zulip,Galexrt/zulip,kaiyuanheshang/zulip,proliming/zulip,akuseru/zulip,jonesgithub/zulip,seapasulli/zulip,dotcool/zulip,EasonYi/zulip,samatdav/zulip,zachallaun/zulip,jimmy54/zulip,fw1121/zulip,jimmy54/zulip,wdaher/zulip,SmartPeople/zulip,joshisa/zulip,hj3938/zulip,luyifan/zulip,souravbadami/zulip,hengqujushi/zulip,dnmfarrell/zulip,jerryge/zulip,zacps/zulip,suxinde2009/zulip,jackrzhang/zulip,xuxiao/zulip,dhcrzf/zulip,natanovia/zulip,hayderimran7/zulip,zofuthan/zulip,schatt/zulip,shubhamdhama/zulip,timabbott/zulip,akuseru/zulip,mansilladev/zulip,souravbadami/zulip,ahmadassaf/zulip,bowlofstew/zulip,christi3k/zulip,voidException/zulip,mohsenSy/zulip,pradiptad/zulip,RobotCaleb/zulip,akuseru/zulip,timabbott/zulip,SmartPeople/zulip,sup95/zulip,xuxiao/zulip,xuanhan863/zulip,niftynei/zulip,luyifan/zulip,zhaoweigg/zulip,reyha/zulip,kaiyuanheshang/zulip,themass/zulip,yocome/zulip,arpith/zulip,vikas-parashar/zulip,LeeRisk/zulip,nicholasbs/zulip,peguin40/zulip,Juanvulcano/zulip,tdr130/zulip,yocome/zulip,PhilSk/zulip,aliceriot/zulip,bluesea/zulip,willingc/zulip,ikasumiwt/zulip,hackerkid/zulip,ericzhou2008/zulip,aps-sids/zulip,ahmadassaf/zulip,MayB/zulip,ikasumiwt/zulip,guiquanz/zulip,rht/zulip,natanovia/zulip,willingc/zulip,saitodisse/zulip,tdr130/zulip,brockwhittaker/zulip,timabbott/zulip,mdavid/zulip,technicalpickles/zulip,bitemyapp/zulip,mohsenSy/zulip,verma-varsha/zulip,Batterfii/zulip,jonesgithub/zulip,lfranchi/zulip,ipernet/zulip,arpith/zulip,RobotCaleb/zulip,zacps/zulip,deer-hope/zulip,wdaher/zulip,atomic-labs/zulip,armooo/zulip,AZtheAsian/zulip,guiquanz/zulip,ufosky-server/zulip,blaze225/zulip,Jianchun1/zulip,glovebx/zulip,kaiyuanheshang/zulip,armooo/zulip,ApsOps/zulip,easyfmxu/zulip,kaiyuanheshang/zulip,bitemyapp/zulip,yocome/zulip,ApsOps/zulip,noroot/zulip,brainwane/zulip,mansilladev/zulip,EasonYi/zulip,wangdeshui/zulip,aakash-cr7/zulip,PhilSk/zulip,dxq-git/zulip,atomic-labs/zulip,SmartPeople/zulip,amyliu345/zulip,levixie/zulip,paxapy/zulip,ericzhou2008/zulip,Frouk/zulip,alliejones/zulip,udxxabp/zulip,hustlzp/zulip,jessedhillon/zulip,joyhchen/zulip,bitemyapp/zulip,Drooids/zulip,jainayush975/zulip,Qgap/zulip,rht/zulip,Suninus/zulip,technicalpickles/zulip,dnmfarrell/zulip,codeKonami/zulip,Vallher/zulip,joshisa/zulip,ryanbackman/zulip,MariaFaBella85/zulip,MariaFaBella85/zulip,hj3938/zulip,susansls/zulip,bssrdf/zulip,stamhe/zulip,seapasulli/zulip,gigawhitlocks/zulip,gkotian/zulip,DazWorrall/zulip,bssrdf/zulip,stamhe/zulip,ericzhou2008/zulip,brockwhittaker/zulip,amallia/zulip,timabbott/zulip,babbage/zulip,reyha/zu
lip,hengqujushi/zulip,developerfm/zulip,grave-w-grave/zulip,bowlofstew/zulip,alliejones/zulip,sup95/zulip,jimmy54/zulip,susansls/zulip,technicalpickles/zulip,verma-varsha/zulip,tiansiyuan/zulip,wavelets/zulip,atomic-labs/zulip,m1ssou/zulip,amanharitsh123/zulip,arpith/zulip,ufosky-server/zulip,adnanh/zulip,RobotCaleb/zulip,bssrdf/zulip,andersk/zulip,vabs22/zulip,DazWorrall/zulip,aps-sids/zulip,karamcnair/zulip,grave-w-grave/zulip,dotcool/zulip,brockwhittaker/zulip,amallia/zulip,eeshangarg/zulip,alliejones/zulip,JanzTam/zulip,deer-hope/zulip,atomic-labs/zulip,glovebx/zulip,TigorC/zulip,Qgap/zulip,MariaFaBella85/zulip,PaulPetring/zulip,karamcnair/zulip,showell/zulip,nicholasbs/zulip,karamcnair/zulip,dhcrzf/zulip,gigawhitlocks/zulip,DazWorrall/zulip,hengqujushi/zulip,Cheppers/zulip,JanzTam/zulip,dotcool/zulip,dxq-git/zulip,yuvipanda/zulip,jerryge/zulip,luyifan/zulip,xuxiao/zulip,thomasboyt/zulip,amanharitsh123/zulip,hackerkid/zulip,ipernet/zulip,yuvipanda/zulip,nicholasbs/zulip,sonali0901/zulip,isht3/zulip,mohsenSy/zulip,Batterfii/zulip,aps-sids/zulip,hafeez3000/zulip,peguin40/zulip,kokoar/zulip,wweiradio/zulip,lfranchi/zulip,he15his/zulip,easyfmxu/zulip,zachallaun/zulip,ipernet/zulip,rht/zulip,jackrzhang/zulip,ahmadassaf/zulip,LAndreas/zulip,peiwei/zulip,tiansiyuan/zulip,itnihao/zulip,johnnygaddarr/zulip,hackerkid/zulip,krtkmj/zulip,cosmicAsymmetry/zulip,qq1012803704/zulip,ryansnowboarder/zulip,qq1012803704/zulip,mahim97/zulip,Galexrt/zulip,jonesgithub/zulip,vakila/zulip,esander91/zulip,ikasumiwt/zulip,PaulPetring/zulip,jerryge/zulip,jrowan/zulip,Galexrt/zulip,zachallaun/zulip,ahmadassaf/zulip,brockwhittaker/zulip,bowlofstew/zulip,so0k/zulip,bitemyapp/zulip,kou/zulip,firstblade/zulip,dattatreya303/zulip,atomic-labs/zulip,dhcrzf/zulip,wangdeshui/zulip,jonesgithub/zulip,swinghu/zulip,JanzTam/zulip,littledogboy/zulip,karamcnair/zulip,LAndreas/zulip,bastianh/zulip,nicholasbs/zulip,thomasboyt/zulip,jonesgithub/zulip,samatdav/zulip,wavelets/zulip,developerfm/zulip,Batterfii/zulip,akuseru/zulip,joyhchen/zulip,tiansiyuan/zulip,susansls/zulip,armooo/zulip,johnnygaddarr/zulip,KingxBanana/zulip,JPJPJPOPOP/zulip,ashwinirudrappa/zulip,dotcool/zulip,krtkmj/zulip,LAndreas/zulip,hustlzp/zulip,aliceriot/zulip,firstblade/zulip,kou/zulip,littledogboy/zulip,mohsenSy/zulip,adnanh/zulip,tbutter/zulip,kou/zulip,dxq-git/zulip,itnihao/zulip,Jianchun1/zulip,zofuthan/zulip,pradiptad/zulip,Jianchun1/zulip,schatt/zulip,ApsOps/zulip,alliejones/zulip,sharmaeklavya2/zulip,schatt/zulip,mdavid/zulip,johnnygaddarr/zulip,jessedhillon/zulip,voidException/zulip,samatdav/zulip,amanharitsh123/zulip,kaiyuanheshang/zulip,ahmadassaf/zulip,kokoar/zulip,dnmfarrell/zulip,jphilipsen05/zulip,aliceriot/zulip,showell/zulip,hayderimran7/zulip,vakila/zulip,shaunstanislaus/zulip,calvinleenyc/zulip,atomic-labs/zulip,voidException/zulip,paxapy/zulip,udxxabp/zulip,seapasulli/zulip,yuvipanda/zulip,christi3k/zulip,mahim97/zulip,tiansiyuan/zulip,Galexrt/zulip,wdaher/zulip,brainwane/zulip,showell/zulip,aliceriot/zulip,shubhamdhama/zulip,tbutter/zulip,shrikrishnaholla/zulip,hafeez3000/zulip,xuxiao/zulip,aps-sids/zulip,KJin99/zulip,so0k/zulip,dawran6/zulip,gkotian/zulip,j831/zulip,Juanvulcano/zulip,kokoar/zulip,jimmy54/zulip,dxq-git/zulip,peiwei/zulip,pradiptad/zulip,itnihao/zulip,shaunstanislaus/zulip,KJin99/zulip,jainayush975/zulip,Cheppers/zulip,wavelets/zulip,Jianchun1/zulip,wangdeshui/zulip,LAndreas/zulip,SmartPeople/zulip,krtkmj/zulip,praveenaki/zulip,karamcnair/zulip,zwily/zulip,synicalsyntax/zulip,ikasumiwt/zulip,mansilladev/zulip,arpith/zulip,te
chnicalpickles/zulip,dnmfarrell/zulip,DazWorrall/zulip,AZtheAsian/zulip,shrikrishnaholla/zulip,xuanhan863/zulip,natanovia/zulip,hackerkid/zulip,dnmfarrell/zulip,wavelets/zulip,grave-w-grave/zulip,he15his/zulip,bitemyapp/zulip,blaze225/zulip,schatt/zulip,zorojean/zulip,vaidap/zulip,grave-w-grave/zulip,johnnygaddarr/zulip,Gabriel0402/zulip,jrowan/zulip,schatt/zulip,sharmaeklavya2/zulip,RobotCaleb/zulip,johnnygaddarr/zulip,krtkmj/zulip,yuvipanda/zulip,cosmicAsymmetry/zulip,Suninus/zulip,dotcool/zulip,reyha/zulip,noroot/zulip,hayderimran7/zulip,kaiyuanheshang/zulip,jimmy54/zulip,jonesgithub/zulip,bluesea/zulip,peiwei/zulip,moria/zulip,moria/zulip,arpitpanwar/zulip,ryansnowboarder/zulip,hafeez3000/zulip,codeKonami/zulip,MariaFaBella85/zulip,JPJPJPOPOP/zulip,KJin99/zulip,thomasboyt/zulip,zorojean/zulip,hustlzp/zulip,xuanhan863/zulip,vakila/zulip,cosmicAsymmetry/zulip,easyfmxu/zulip,suxinde2009/zulip,dotcool/zulip,johnny9/zulip,shubhamdhama/zulip,seapasulli/zulip,tiansiyuan/zulip,noroot/zulip,codeKonami/zulip,vikas-parashar/zulip,Jianchun1/zulip,christi3k/zulip,easyfmxu/zulip,KingxBanana/zulip,souravbadami/zulip,Vallher/zulip,Drooids/zulip,Cheppers/zulip,moria/zulip,timabbott/zulip,adnanh/zulip,he15his/zulip,Drooids/zulip,esander91/zulip,levixie/zulip,luyifan/zulip,punchagan/zulip,avastu/zulip,ericzhou2008/zulip,littledogboy/zulip,verma-varsha/zulip,huangkebo/zulip,zulip/zulip,zulip/zulip,saitodisse/zulip,Qgap/zulip,zulip/zulip,tommyip/zulip,jessedhillon/zulip,Gabriel0402/zulip,JPJPJPOPOP/zulip,isht3/zulip,hustlzp/zulip,adnanh/zulip,firstblade/zulip,sonali0901/zulip,EasonYi/zulip,sonali0901/zulip,glovebx/zulip,ahmadassaf/zulip,amyliu345/zulip,kokoar/zulip,MariaFaBella85/zulip,AZtheAsian/zulip,dxq-git/zulip,hustlzp/zulip,zwily/zulip,ahmadassaf/zulip,ryanbackman/zulip,moria/zulip,calvinleenyc/zulip,dattatreya303/zulip,blaze225/zulip,andersk/zulip,ikasumiwt/zulip,so0k/zulip,adnanh/zulip,bssrdf/zulip,willingc/zulip,jeffcao/zulip,arpitpanwar/zulip,m1ssou/zulip,lfranchi/zulip,so0k/zulip,wangdeshui/zulip,hayderimran7/zulip,tommyip/zulip,gkotian/zulip,jessedhillon/zulip,bastianh/zulip,jainayush975/zulip,hafeez3000/zulip,vakila/zulip,aakash-cr7/zulip,fw1121/zulip,yocome/zulip,dxq-git/zulip,firstblade/zulip,KJin99/zulip,calvinleenyc/zulip,vikas-parashar/zulip,joshisa/zulip,Frouk/zulip,thomasboyt/zulip,tdr130/zulip,jerryge/zulip,noroot/zulip,mdavid/zulip,mahim97/zulip,ipernet/zulip,guiquanz/zulip,fw1121/zulip,jimmy54/zulip,Frouk/zulip,amyliu345/zulip,bastianh/zulip,stamhe/zulip,xuxiao/zulip,jphilipsen05/zulip,punchagan/zulip,ryansnowboarder/zulip,stamhe/zulip,andersk/zulip,MayB/zulip,tbutter/zulip,jackrzhang/zulip,shubhamdhama/zulip,peguin40/zulip,schatt/zulip,dhcrzf/zulip,krtkmj/zulip,joshisa/zulip,verma-varsha/zulip,zulip/zulip,babbage/zulip,jeffcao/zulip,guiquanz/zulip,lfranchi/zulip,ericzhou2008/zulip,Vallher/zulip,littledogboy/zulip,eastlhu/zulip,samatdav/zulip,swinghu/zulip,punchagan/zulip,xuxiao/zulip,proliming/zulip,bowlofstew/zulip,karamcnair/zulip,reyha/zulip,shubhamdhama/zulip,voidException/zulip,bssrdf/zulip,saitodisse/zulip,rht/zulip,saitodisse/zulip,themass/zulip,umkay/zulip,wdaher/zulip,mohsenSy/zulip,amallia/zulip,brockwhittaker/zulip,bowlofstew/zulip,vaidap/zulip,itnihao/zulip,swinghu/zulip,Frouk/zulip,swinghu/zulip,zorojean/zulip,amanharitsh123/zulip,ryansnowboarder/zulip,noroot/zulip,Suninus/zulip,ryansnowboarder/zulip,deer-hope/zulip,codeKonami/zulip,punchagan/zulip,vaidap/zulip,peiwei/zulip,Gabriel0402/zulip,Gabriel0402/zulip,jackrzhang/zulip,ikasumiwt/zulip,AZtheAsian/zulip,tdr130/zulip
,qq1012803704/zulip,proliming/zulip,isht3/zulip,bastianh/zulip,AZtheAsian/zulip,stamhe/zulip,calvinleenyc/zulip,voidException/zulip,developerfm/zulip,Juanvulcano/zulip,ApsOps/zulip,cosmicAsymmetry/zulip,swinghu/zulip,kou/zulip,PhilSk/zulip,ericzhou2008/zulip,LeeRisk/zulip,joyhchen/zulip,wweiradio/zulip,rishig/zulip,jainayush975/zulip,firstblade/zulip,shrikrishnaholla/zulip,joyhchen/zulip,shaunstanislaus/zulip,gigawhitlocks/zulip,esander91/zulip,luyifan/zulip,dawran6/zulip,ashwinirudrappa/zulip,samatdav/zulip,sharmaeklavya2/zulip,kokoar/zulip,Drooids/zulip,synicalsyntax/zulip,PaulPetring/zulip,eastlhu/zulip,krtkmj/zulip,pradiptad/zulip,timabbott/zulip,jrowan/zulip,nicholasbs/zulip,tiansiyuan/zulip,hafeez3000/zulip,vikas-parashar/zulip,eeshangarg/zulip,brainwane/zulip,kokoar/zulip,zachallaun/zulip,akuseru/zulip,wdaher/zulip,Batterfii/zulip,DazWorrall/zulip,paxapy/zulip,christi3k/zulip,JanzTam/zulip,ryanbackman/zulip,sharmaeklavya2/zulip,TigorC/zulip,shaunstanislaus/zulip,reyha/zulip,ashwinirudrappa/zulip,he15his/zulip,rht/zulip,KJin99/zulip,bitemyapp/zulip,sonali0901/zulip,ApsOps/zulip,mdavid/zulip,calvinleenyc/zulip,sup95/zulip,hj3938/zulip,mansilladev/zulip,jphilipsen05/zulip,hj3938/zulip,dwrpayne/zulip,jerryge/zulip,ryansnowboarder/zulip,wdaher/zulip,jeffcao/zulip,deer-hope/zulip,thomasboyt/zulip,armooo/zulip,alliejones/zulip,SmartPeople/zulip,qq1012803704/zulip,itnihao/zulip,natanovia/zulip,praveenaki/zulip,ufosky-server/zulip,codeKonami/zulip,johnnygaddarr/zulip,dawran6/zulip,bluesea/zulip,praveenaki/zulip,zhaoweigg/zulip,johnny9/zulip,gigawhitlocks/zulip,eastlhu/zulip,fw1121/zulip,hengqujushi/zulip,joshisa/zulip,sup95/zulip,ryanbackman/zulip,firstblade/zulip,eeshangarg/zulip,proliming/zulip,Jianchun1/zulip,jrowan/zulip,moria/zulip,grave-w-grave/zulip,rishig/zulip,jeffcao/zulip,atomic-labs/zulip,souravbadami/zulip,proliming/zulip,dxq-git/zulip,amallia/zulip,LAndreas/zulip,sonali0901/zulip,Qgap/zulip,willingc/zulip,huangkebo/zulip,karamcnair/zulip,ikasumiwt/zulip,glovebx/zulip,zorojean/zulip,sonali0901/zulip,dawran6/zulip,zhaoweigg/zulip,dwrpayne/zulip,yocome/zulip,isht3/zulip,wangdeshui/zulip,avastu/zulip,Qgap/zulip,Diptanshu8/zulip,dotcool/zulip,gkotian/zulip,shrikrishnaholla/zulip,RobotCaleb/zulip,ufosky-server/zulip,wavelets/zulip,LAndreas/zulip,jimmy54/zulip,TigorC/zulip,Batterfii/zulip,brainwane/zulip,adnanh/zulip,joyhchen/zulip,zorojean/zulip,bastianh/zulip,fw1121/zulip,Gabriel0402/zulip,paxapy/zulip,Vallher/zulip,levixie/zulip,hustlzp/zulip,natanovia/zulip,moria/zulip,j831/zulip,avastu/zulip,dwrpayne/zulip,PaulPetring/zulip,EasonYi/zulip,joshisa/zulip,vakila/zulip,vaidap/zulip,EasonYi/zulip,lfranchi/zulip,joshisa/zulip,brainwane/zulip,isht3/zulip,amallia/zulip,eeshangarg/zulip,technicalpickles/zulip,huangkebo/zulip,levixie/zulip,yuvipanda/zulip,Qgap/zulip,yuvipanda/zulip,arpitpanwar/zulip,ryansnowboarder/zulip,Suninus/zulip,JanzTam/zulip,avastu/zulip,m1ssou/zulip,arpith/zulip,arpitpanwar/zulip,punchagan/zulip,vikas-parashar/zulip,umkay/zulip,Vallher/zulip,johnny9/zulip,swinghu/zulip,verma-varsha/zulip,noroot/zulip,blaze225/zulip,hackerkid/zulip,suxinde2009/zulip,calvinleenyc/zulip,fw1121/zulip,stamhe/zulip,MariaFaBella85/zulip,ufosky-server/zulip,huangkebo/zulip,proliming/zulip,johnny9/zulip,JPJPJPOPOP/zulip,niftynei/zulip,littledogboy/zulip,lfranchi/zulip,Frouk/zulip,jainayush975/zulip,zwily/zulip,qq1012803704/zulip,glovebx/zulip,synicalsyntax/zulip,saitodisse/zulip,Drooids/zulip,jeffcao/zulip,j831/zulip,praveenaki/zulip,aakash-cr7/zulip,zulip/zulip,hengqujushi/zulip,tommyip/zuli
p,eastlhu/zulip,vakila/zulip,vabs22/zulip,praveenaki/zulip,eeshangarg/zulip,lfranchi/zulip,susansls/zulip,easyfmxu/zulip,Frouk/zulip,aakash-cr7/zulip,easyfmxu/zulip,johnny9/zulip,jphilipsen05/zulip,zofuthan/zulip,cosmicAsymmetry/zulip,yocome/zulip,zachallaun/zulip,Drooids/zulip,shrikrishnaholla/zulip,babbage/zulip,Gabriel0402/zulip,AZtheAsian/zulip,gkotian/zulip,wweiradio/zulip,seapasulli/zulip,suxinde2009/zulip,pradiptad/zulip,dwrpayne/zulip,developerfm/zulip,yuvipanda/zulip,dattatreya303/zulip,kou/zulip,shrikrishnaholla/zulip,shaunstanislaus/zulip,andersk/zulip,udxxabp/zulip,dnmfarrell/zulip,christi3k/zulip,amallia/zulip,brainwane/zulip,showell/zulip,MayB/zulip,kou/zulip,ApsOps/zulip,shubhamdhama/zulip,paxapy/zulip,zachallaun/zulip,Frouk/zulip,bssrdf/zulip,willingc/zulip,alliejones/zulip,pradiptad/zulip,LeeRisk/zulip,Cheppers/zulip,dwrpayne/zulip,xuanhan863/zulip,bluesea/zulip,KingxBanana/zulip,j831/zulip,zofuthan/zulip,vabs22/zulip,tbutter/zulip,showell/zulip,mohsenSy/zulip,dwrpayne/zulip,hengqujushi/zulip,eeshangarg/zulip,xuanhan863/zulip,SmartPeople/zulip,ufosky-server/zulip,aakash-cr7/zulip,sup95/zulip,jessedhillon/zulip,tommyip/zulip,Drooids/zulip,hayderimran7/zulip,dawran6/zulip,codeKonami/zulip,Suninus/zulip,ryanbackman/zulip,jackrzhang/zulip,huangkebo/zulip,babbage/zulip,zwily/zulip,akuseru/zulip,Diptanshu8/zulip,umkay/zulip,shaunstanislaus/zulip,guiquanz/zulip,stamhe/zulip,rht/zulip,wangdeshui/zulip,levixie/zulip,levixie/zulip,KJin99/zulip,jonesgithub/zulip,natanovia/zulip,aps-sids/zulip,ipernet/zulip,peguin40/zulip,qq1012803704/zulip,LeeRisk/zulip,MayB/zulip,umkay/zulip,udxxabp/zulip,PaulPetring/zulip,bowlofstew/zulip,vikas-parashar/zulip,jeffcao/zulip,punchagan/zulip,Vallher/zulip,sharmaeklavya2/zulip,TigorC/zulip,LeeRisk/zulip,andersk/zulip,aliceriot/zulip,luyifan/zulip,Diptanshu8/zulip,EasonYi/zulip,suxinde2009/zulip,arpitpanwar/zulip,samatdav/zulip,umkay/zulip,hj3938/zulip,zwily/zulip,developerfm/zulip,babbage/zulip,umkay/zulip,dhcrzf/zulip,jerryge/zulip,Vallher/zulip,PhilSk/zulip,hackerkid/zulip,souravbadami/zulip,bowlofstew/zulip,babbage/zulip,wweiradio/zulip,bluesea/zulip,zorojean/zulip,showell/zulip,rht/zulip,nicholasbs/zulip,aps-sids/zulip,andersk/zulip,arpitpanwar/zulip,Gabriel0402/zulip,bastianh/zulip,udxxabp/zulip,paxapy/zulip,shubhamdhama/zulip,mdavid/zulip,hengqujushi/zulip,tbutter/zulip,JPJPJPOPOP/zulip,zhaoweigg/zulip,peiwei/zulip,TigorC/zulip,PhilSk/zulip,blaze225/zulip,zacps/zulip,wweiradio/zulip,arpitpanwar/zulip,niftynei/zulip,xuanhan863/zulip,udxxabp/zulip,udxxabp/zulip,pradiptad/zulip,esander91/zulip,hj3938/zulip,zwily/zulip,gigawhitlocks/zulip,voidException/zulip,seapasulli/zulip,bluesea/zulip,JanzTam/zulip,glovebx/zulip,armooo/zulip,timabbott/zulip,ericzhou2008/zulip,armooo/zulip,so0k/zulip,Galexrt/zulip,johnny9/zulip,MayB/zulip,huangkebo/zulip,JPJPJPOPOP/zulip,zulip/zulip,m1ssou/zulip,ryanbackman/zulip,wavelets/zulip,hackerkid/zulip,moria/zulip,mahim97/zulip,hayderimran7/zulip,easyfmxu/zulip,technicalpickles/zulip,thomasboyt/zulip,ApsOps/zulip,rishig/zulip,peguin40/zulip,bastianh/zulip,zofuthan/zulip,m1ssou/zulip,thomasboyt/zulip,jeffcao/zulip,synicalsyntax/zulip,Diptanshu8/zulip,ipernet/zulip,kokoar/zulip,Cheppers/zulip,zhaoweigg/zulip,Diptanshu8/zulip,guiquanz/zulip,shrikrishnaholla/zulip,huangkebo/zulip,j831/zulip,zofuthan/zulip,RobotCaleb/zulip,zofuthan/zulip,johnnygaddarr/zulip,swinghu/zulip,amanharitsh123/zulip,babbage/zulip,dattatreya303/zulip,akuseru/zulip,mansilladev/zulip,dhcrzf/zulip,Suninus/zulip,zacps/zulip,tdr130/zulip,mahim97/zulip,vabs22/
zulip,wangdeshui/zulip,Cheppers/zulip,RobotCaleb/zulip,dawran6/zulip,showell/zulip,themass/zulip,so0k/zulip,amyliu345/zulip,punchagan/zulip,zorojean/zulip,dwrpayne/zulip,themass/zulip,bitemyapp/zulip,nicholasbs/zulip,luyifan/zulip,zhaoweigg/zulip,peiwei/zulip,firstblade/zulip,mdavid/zulip,seapasulli/zulip,willingc/zulip,christi3k/zulip,rishig/zulip,adnanh/zulip,DazWorrall/zulip,mansilladev/zulip,TigorC/zulip,dattatreya303/zulip,gigawhitlocks/zulip,hustlzp/zulip,zulip/zulip,cosmicAsymmetry/zulip,technicalpickles/zulip,Cheppers/zulip,jerryge/zulip,dattatreya303/zulip,blaze225/zulip,jackrzhang/zulip,vaidap/zulip,avastu/zulip,avastu/zulip,JanzTam/zulip,brainwane/zulip,deer-hope/zulip,saitodisse/zulip,ufosky-server/zulip,jrowan/zulip,KingxBanana/zulip,amyliu345/zulip,ashwinirudrappa/zulip,tbutter/zulip,vabs22/zulip,umkay/zulip,andersk/zulip,KingxBanana/zulip,dnmfarrell/zulip,jessedhillon/zulip,sup95/zulip,zachallaun/zulip,KingxBanana/zulip,tdr130/zulip,suxinde2009/zulip,Juanvulcano/zulip,xuanhan863/zulip,jainayush975/zulip,bluesea/zulip,guiquanz/zulip,themass/zulip,wweiradio/zulip,Batterfii/zulip,esander91/zulip,Juanvulcano/zulip,ashwinirudrappa/zulip,wweiradio/zulip,LAndreas/zulip,susansls/zulip,jrowan/zulip,aliceriot/zulip,eeshangarg/zulip,mahim97/zulip,bssrdf/zulip,synicalsyntax/zulip,zacps/zulip,Batterfii/zulip,esander91/zulip,susansls/zulip,aliceriot/zulip,johnny9/zulip,so0k/zulip,m1ssou/zulip,qq1012803704/zulip,kaiyuanheshang/zulip,esander91/zulip,sharmaeklavya2/zulip,grave-w-grave/zulip,synicalsyntax/zulip,mansilladev/zulip,synicalsyntax/zulip,j831/zulip,EasonYi/zulip,Galexrt/zulip,themass/zulip,praveenaki/zulip,wavelets/zulip,eastlhu/zulip,schatt/zulip,isht3/zulip,aps-sids/zulip,Galexrt/zulip,itnihao/zulip,yocome/zulip,joyhchen/zulip,armooo/zulip,themass/zulip,tiansiyuan/zulip,niftynei/zulip,vabs22/zulip,reyha/zulip,rishig/zulip,wdaher/zulip,vaidap/zulip,peiwei/zulip,aakash-cr7/zulip,hafeez3000/zulip,eastlhu/zulip,m1ssou/zulip,deer-hope/zulip,he15his/zulip,noroot/zulip,zacps/zulip,amanharitsh123/zulip,fw1121/zulip,saitodisse/zulip,xuxiao/zulip,PhilSk/zulip,arpith/zulip,amallia/zulip,rishig/zulip,amyliu345/zulip,ipernet/zulip,shaunstanislaus/zulip
|
Add a management command to print the initial password for an account
(imported from commit 0a2b7d8215961801dbd24d9af89785e857b9ba14)
|
from django.core.management.base import BaseCommand
from zephyr.lib.initial_password import initial_password
class Command(BaseCommand):
help = "Print the initial password for accounts as created by populate_db"
def handle(self, *args, **options):
print
for email in args:
if '@' not in email:
print 'ERROR: %s does not look like an email address' % (email,)
continue
print '%-30s %-16s' % (email, initial_password(email))
|
<commit_before><commit_msg>Add a management command to print the initial password for an account
(imported from commit 0a2b7d8215961801dbd24d9af89785e857b9ba14)<commit_after>
|
from django.core.management.base import BaseCommand
from zephyr.lib.initial_password import initial_password
class Command(BaseCommand):
help = "Print the initial password for accounts as created by populate_db"
def handle(self, *args, **options):
print
for email in args:
if '@' not in email:
print 'ERROR: %s does not look like an email address' % (email,)
continue
print '%-30s %-16s' % (email, initial_password(email))
|
Add a management command to print the initial password for an account
(imported from commit 0a2b7d8215961801dbd24d9af89785e857b9ba14)from django.core.management.base import BaseCommand
from zephyr.lib.initial_password import initial_password
class Command(BaseCommand):
help = "Print the initial password for accounts as created by populate_db"
def handle(self, *args, **options):
print
for email in args:
if '@' not in email:
print 'ERROR: %s does not look like an email address' % (email,)
continue
print '%-30s %-16s' % (email, initial_password(email))
|
<commit_before><commit_msg>Add a management command to print the initial password for an account
(imported from commit 0a2b7d8215961801dbd24d9af89785e857b9ba14)<commit_after>from django.core.management.base import BaseCommand
from zephyr.lib.initial_password import initial_password
class Command(BaseCommand):
help = "Print the initial password for accounts as created by populate_db"
def handle(self, *args, **options):
print
for email in args:
if '@' not in email:
print 'ERROR: %s does not look like an email address' % (email,)
continue
print '%-30s %-16s' % (email, initial_password(email))
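
The command above is Python 2 code (bare `print` statements), and it relies on `initial_password` being deterministic, so that accounts created by populate_db can be logged into without storing passwords anywhere. The real zephyr.lib.initial_password implementation is not shown in this record; the following is only a hypothetical sketch of how such a helper could derive a stable password from an email address:

import base64
import hashlib

SHARED_SECRET = "populate_db-secret"  # assumed; a real implementation would read a setting

def initial_password(email):
    # Deterministic: the same email always yields the same password.
    digest = hashlib.sha256((SHARED_SECRET + email).encode("utf-8")).digest()
    return base64.b64encode(digest)[:16].decode("ascii")

Typical invocation of the command would then be: python manage.py print_initial_password hamlet@example.com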
|
|
fd4f4a061a5dade9ed918ce40d3a09d78ed7cea3
|
snippets/base/migrations/0019_auto_20170726_0635.py
|
snippets/base/migrations/0019_auto_20170726_0635.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0018_auto_20170614_0925'),
]
operations = [
migrations.AlterField(
model_name='snippet',
name='campaign',
field=models.CharField(default=b'', help_text=b'Optional campaign name. Will be added in the stats ping. Will be used for snippet blocking if set.', max_length=255, blank=True),
),
]
|
Add snippet.campaign field migration for help_text change.
|
Add snippet.campaign field migration for help_text change.
|
Python
|
mpl-2.0
|
mozilla/snippets-service,mozmar/snippets-service,glogiotatidis/snippets-service,glogiotatidis/snippets-service,mozilla/snippets-service,mozmar/snippets-service,mozmar/snippets-service,glogiotatidis/snippets-service,glogiotatidis/snippets-service,mozmar/snippets-service,mozilla/snippets-service,mozilla/snippets-service
|
Add snippet.campaign field migration for help_text change.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0018_auto_20170614_0925'),
]
operations = [
migrations.AlterField(
model_name='snippet',
name='campaign',
field=models.CharField(default=b'', help_text=b'Optional campaign name. Will be added in the stats ping. Will be used for snippet blocking if set.', max_length=255, blank=True),
),
]
|
<commit_before><commit_msg>Add snippet.campaign field migration for help_text change.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0018_auto_20170614_0925'),
]
operations = [
migrations.AlterField(
model_name='snippet',
name='campaign',
field=models.CharField(default=b'', help_text=b'Optional campaign name. Will be added in the stats ping. Will be used for snippet blocking if set.', max_length=255, blank=True),
),
]
|
Add snippet.campaign field migration for help_text change.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0018_auto_20170614_0925'),
]
operations = [
migrations.AlterField(
model_name='snippet',
name='campaign',
field=models.CharField(default=b'', help_text=b'Optional campaign name. Will be added in the stats ping. Will be used for snippet blocking if set.', max_length=255, blank=True),
),
]
|
<commit_before><commit_msg>Add snippet.campaign field migration for help_text change.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0018_auto_20170614_0925'),
]
operations = [
migrations.AlterField(
model_name='snippet',
name='campaign',
field=models.CharField(default=b'', help_text=b'Optional campaign name. Will be added in the stats ping. Will be used for snippet blocking if set.', max_length=255, blank=True),
),
]
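
Because this AlterField only changes help_text, it is a no-op at the SQL level: Django records it so that makemigrations stays clean, but sqlmigrate would emit an empty transaction. A quick runtime check of the new description (the import path is assumed from the migration's location under snippets/base/):

from snippets.base.models import Snippet

field = Snippet._meta.get_field("campaign")
assert field.help_text.startswith("Optional campaign name")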
|
|
3771d3165d4873592f53d8b2401806297fe2989f
|
door/models.py
|
door/models.py
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField(default=timezone.now)
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField(default=timezone.now)
closed = models.DateTimeField(default=timezone.now)
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
Remove default datetime in door
|
Remove default datetime in door
|
Python
|
mit
|
hackerspace-ntnu/website,hackerspace-ntnu/website,hackerspace-ntnu/website
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField(default=timezone.now)
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField(default=timezone.now)
closed = models.DateTimeField(default=timezone.now)
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
Remove default datetime in door
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
<commit_before>from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField(default=timezone.now)
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField(default=timezone.now)
closed = models.DateTimeField(default=timezone.now)
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
<commit_msg>Remove default datetime in door<commit_after>
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField(default=timezone.now)
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField(default=timezone.now)
closed = models.DateTimeField(default=timezone.now)
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
Remove default datetime in doorfrom django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
|
<commit_before>from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField(default=timezone.now)
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField(default=timezone.now)
closed = models.DateTimeField(default=timezone.now)
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
<commit_msg>Remove default datetime in door<commit_after>from django.db import models
from django.utils import timezone
from datetime import datetime
# Create your models here.
class DoorStatus(models.Model):
datetime = models.DateTimeField()
status = models.BooleanField(default=False)
name = models.CharField(max_length=20)
def __str__(self):
return self.name
class OpenData(models.Model):
opened = models.DateTimeField()
closed = models.DateTimeField()
total = models.IntegerField(default=0)
def __str__(self):
return str(self.opened)
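
Two side effects of this change are worth noting: both the `timezone` and `datetime` imports are now unused, and callers must supply explicit values when creating DoorStatus and OpenData rows, since the fields no longer default to the current time. Dropping a Python-level default also alters the field definition, so Django will expect a migration; here is a sketch of what makemigrations would generate (the migration name and dependency are assumptions):

from django.db import migrations, models

class Migration(migrations.Migration):
    dependencies = [('door', '0001_initial')]
    operations = [
        migrations.AlterField(
            model_name='doorstatus',
            name='datetime',
            field=models.DateTimeField(),
        ),
        migrations.AlterField(
            model_name='opendata',
            name='opened',
            field=models.DateTimeField(),
        ),
        migrations.AlterField(
            model_name='opendata',
            name='closed',
            field=models.DateTimeField(),
        ),
    ]

Existing rows are unaffected either way: Django applies such defaults in Python at save time, not in the database schema.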
|
f290b8e93dc8e833f9cfa684b6cdb9420e8bf3d6
|
adhocracy4/projects/migrations/0011_fix_copyright_field_desc.py
|
adhocracy4/projects/migrations/0011_fix_copyright_field_desc.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-18 14:44
from __future__ import unicode_literals
import adhocracy4.images.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('a4projects', '0010_image_copyrights'),
]
operations = [
migrations.AlterField(
model_name='project',
name='image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Header image.', max_length=120, verbose_name='Header image copyright'),
),
migrations.AlterField(
model_name='project',
name='tile_image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Tile image.', max_length=120, verbose_name='Tile image copyright'),
),
]
|
Add migration with fixed copyright field descriptions
|
Add migration with fixed copyright field descriptions
|
Python
|
agpl-3.0
|
liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4
|
Add migration with fixed copyright field descriptions
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-18 14:44
from __future__ import unicode_literals
import adhocracy4.images.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('a4projects', '0010_image_copyrights'),
]
operations = [
migrations.AlterField(
model_name='project',
name='image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Header image.', max_length=120, verbose_name='Header image copyright'),
),
migrations.AlterField(
model_name='project',
name='tile_image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Tile image.', max_length=120, verbose_name='Tile image copyright'),
),
]
|
<commit_before><commit_msg>Add migration with fixed copyright field descriptions<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-18 14:44
from __future__ import unicode_literals
import adhocracy4.images.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('a4projects', '0010_image_copyrights'),
]
operations = [
migrations.AlterField(
model_name='project',
name='image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Header image.', max_length=120, verbose_name='Header image copyright'),
),
migrations.AlterField(
model_name='project',
name='tile_image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Tile image.', max_length=120, verbose_name='Tile image copyright'),
),
]
|
Add migration with fixed copyright field descriptions# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-18 14:44
from __future__ import unicode_literals
import adhocracy4.images.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('a4projects', '0010_image_copyrights'),
]
operations = [
migrations.AlterField(
model_name='project',
name='image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Header image.', max_length=120, verbose_name='Header image copyright'),
),
migrations.AlterField(
model_name='project',
name='tile_image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Tile image.', max_length=120, verbose_name='Tile image copyright'),
),
]
|
<commit_before><commit_msg>Add migration with fixed copyright field descriptions<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-09-18 14:44
from __future__ import unicode_literals
import adhocracy4.images.fields
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('a4projects', '0010_image_copyrights'),
]
operations = [
migrations.AlterField(
model_name='project',
name='image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Header image.', max_length=120, verbose_name='Header image copyright'),
),
migrations.AlterField(
model_name='project',
name='tile_image_copyright',
field=adhocracy4.images.fields.ImageCopyrightField(blank=True, help_text='Copyright shown in the Tile image.', max_length=120, verbose_name='Tile image copyright'),
),
]
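
ImageCopyrightField itself is not shown in this record; from the arguments the migration passes (max_length, blank, help_text, verbose_name) it behaves like a thin CharField subclass. A hypothetical sketch of such a field, purely to illustrate the kind of field the migration is altering (the real adhocracy4.images.fields implementation may differ):

from django.db import models

class ImageCopyrightField(models.CharField):
    # Assumed shape of adhocracy4.images.fields.ImageCopyrightField.
    def __init__(self, *args, **kwargs):
        kwargs.setdefault("max_length", 120)
        kwargs.setdefault("blank", True)
        super().__init__(*args, **kwargs)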
|
|
78cf46620600998af834c9a99df07e18f302a282
|
test/factory/test_text_loader_factory.py
|
test/factory/test_text_loader_factory.py
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
import pytablereader as ptr
class Test_TableTextLoaderFactory:
@pytest.mark.parametrize(["value", "expected"], [[None, ValueError]])
def test_exception(self, value, expected):
with pytest.raises(expected):
ptr.factory.TableTextLoaderFactory(value)
class Test_TableTextLoaderFactory_create_from_format_name:
@pytest.mark.parametrize(
["format_name", "expected"],
[
["csv", ptr.CsvTableTextLoader],
["CSV", ptr.CsvTableTextLoader],
["html", ptr.HtmlTableTextLoader],
["HTML", ptr.HtmlTableTextLoader],
["json", ptr.JsonTableTextLoader],
["JSON", ptr.JsonTableTextLoader],
["markdown", ptr.MarkdownTableTextLoader],
["Markdown", ptr.MarkdownTableTextLoader],
["mediawiki", ptr.MediaWikiTableTextLoader],
["MediaWiki", ptr.MediaWikiTableTextLoader],
["tsv", ptr.TsvTableTextLoader],
["TSV", ptr.TsvTableTextLoader],
],
)
def test_normal(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummy")
loader = loader_factory.create_from_format_name(format_name)
assert isinstance(loader, expected)
@pytest.mark.parametrize(
["format_name", "expected"],
[
["not_exist_format", ptr.LoaderNotFoundError],
["", ptr.LoaderNotFoundError],
[None, TypeError],
[0, TypeError],
["auto", ptr.LoaderNotFoundError],
],
)
def test_exception(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummyy")
with pytest.raises(expected):
loader_factory.create_from_format_name(format_name)
|
Add test cases for TableTextLoaderFactory
|
Add test cases for TableTextLoaderFactory
|
Python
|
mit
|
thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader
|
Add test cases for TableTextLoaderFactory
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
import pytablereader as ptr
class Test_TableTextLoaderFactory:
@pytest.mark.parametrize(["value", "expected"], [[None, ValueError]])
def test_exception(self, value, expected):
with pytest.raises(expected):
ptr.factory.TableTextLoaderFactory(value)
class Test_TableTextLoaderFactory_create_from_format_name:
@pytest.mark.parametrize(
["format_name", "expected"],
[
["csv", ptr.CsvTableTextLoader],
["CSV", ptr.CsvTableTextLoader],
["html", ptr.HtmlTableTextLoader],
["HTML", ptr.HtmlTableTextLoader],
["json", ptr.JsonTableTextLoader],
["JSON", ptr.JsonTableTextLoader],
["markdown", ptr.MarkdownTableTextLoader],
["Markdown", ptr.MarkdownTableTextLoader],
["mediawiki", ptr.MediaWikiTableTextLoader],
["MediaWiki", ptr.MediaWikiTableTextLoader],
["tsv", ptr.TsvTableTextLoader],
["TSV", ptr.TsvTableTextLoader],
],
)
def test_normal(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummy")
loader = loader_factory.create_from_format_name(format_name)
assert isinstance(loader, expected)
@pytest.mark.parametrize(
["format_name", "expected"],
[
["not_exist_format", ptr.LoaderNotFoundError],
["", ptr.LoaderNotFoundError],
[None, TypeError],
[0, TypeError],
["auto", ptr.LoaderNotFoundError],
],
)
def test_exception(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummyy")
with pytest.raises(expected):
loader_factory.create_from_format_name(format_name)
|
<commit_before><commit_msg>Add test cases for TableTextLoaderFactory<commit_after>
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
import pytablereader as ptr
class Test_TableTextLoaderFactory:
@pytest.mark.parametrize(["value", "expected"], [[None, ValueError]])
def test_exception(self, value, expected):
with pytest.raises(expected):
ptr.factory.TableTextLoaderFactory(value)
class Test_TableTextLoaderFactory_create_from_format_name:
@pytest.mark.parametrize(
["format_name", "expected"],
[
["csv", ptr.CsvTableTextLoader],
["CSV", ptr.CsvTableTextLoader],
["html", ptr.HtmlTableTextLoader],
["HTML", ptr.HtmlTableTextLoader],
["json", ptr.JsonTableTextLoader],
["JSON", ptr.JsonTableTextLoader],
["markdown", ptr.MarkdownTableTextLoader],
["Markdown", ptr.MarkdownTableTextLoader],
["mediawiki", ptr.MediaWikiTableTextLoader],
["MediaWiki", ptr.MediaWikiTableTextLoader],
["tsv", ptr.TsvTableTextLoader],
["TSV", ptr.TsvTableTextLoader],
],
)
def test_normal(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummy")
loader = loader_factory.create_from_format_name(format_name)
assert isinstance(loader, expected)
@pytest.mark.parametrize(
["format_name", "expected"],
[
["not_exist_format", ptr.LoaderNotFoundError],
["", ptr.LoaderNotFoundError],
[None, TypeError],
[0, TypeError],
["auto", ptr.LoaderNotFoundError],
],
)
def test_exception(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummyy")
with pytest.raises(expected):
loader_factory.create_from_format_name(format_name)
|
Add test cases for TableTextLoaderFactory"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
import pytablereader as ptr
class Test_TableTextLoaderFactory:
@pytest.mark.parametrize(["value", "expected"], [[None, ValueError]])
def test_exception(self, value, expected):
with pytest.raises(expected):
ptr.factory.TableTextLoaderFactory(value)
class Test_TableTextLoaderFactory_create_from_format_name:
@pytest.mark.parametrize(
["format_name", "expected"],
[
["csv", ptr.CsvTableTextLoader],
["CSV", ptr.CsvTableTextLoader],
["html", ptr.HtmlTableTextLoader],
["HTML", ptr.HtmlTableTextLoader],
["json", ptr.JsonTableTextLoader],
["JSON", ptr.JsonTableTextLoader],
["markdown", ptr.MarkdownTableTextLoader],
["Markdown", ptr.MarkdownTableTextLoader],
["mediawiki", ptr.MediaWikiTableTextLoader],
["MediaWiki", ptr.MediaWikiTableTextLoader],
["tsv", ptr.TsvTableTextLoader],
["TSV", ptr.TsvTableTextLoader],
],
)
def test_normal(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummy")
loader = loader_factory.create_from_format_name(format_name)
assert isinstance(loader, expected)
@pytest.mark.parametrize(
["format_name", "expected"],
[
["not_exist_format", ptr.LoaderNotFoundError],
["", ptr.LoaderNotFoundError],
[None, TypeError],
[0, TypeError],
["auto", ptr.LoaderNotFoundError],
],
)
def test_exception(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummyy")
with pytest.raises(expected):
loader_factory.create_from_format_name(format_name)
|
<commit_before><commit_msg>Add test cases for TableTextLoaderFactory<commit_after>"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import pytest
import pytablereader as ptr
class Test_TableTextLoaderFactory:
@pytest.mark.parametrize(["value", "expected"], [[None, ValueError]])
def test_exception(self, value, expected):
with pytest.raises(expected):
ptr.factory.TableTextLoaderFactory(value)
class Test_TableTextLoaderFactory_create_from_format_name:
@pytest.mark.parametrize(
["format_name", "expected"],
[
["csv", ptr.CsvTableTextLoader],
["CSV", ptr.CsvTableTextLoader],
["html", ptr.HtmlTableTextLoader],
["HTML", ptr.HtmlTableTextLoader],
["json", ptr.JsonTableTextLoader],
["JSON", ptr.JsonTableTextLoader],
["markdown", ptr.MarkdownTableTextLoader],
["Markdown", ptr.MarkdownTableTextLoader],
["mediawiki", ptr.MediaWikiTableTextLoader],
["MediaWiki", ptr.MediaWikiTableTextLoader],
["tsv", ptr.TsvTableTextLoader],
["TSV", ptr.TsvTableTextLoader],
],
)
def test_normal(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummy")
loader = loader_factory.create_from_format_name(format_name)
assert isinstance(loader, expected)
@pytest.mark.parametrize(
["format_name", "expected"],
[
["not_exist_format", ptr.LoaderNotFoundError],
["", ptr.LoaderNotFoundError],
[None, TypeError],
[0, TypeError],
["auto", ptr.LoaderNotFoundError],
],
)
def test_exception(self, format_name, expected):
loader_factory = ptr.factory.TableTextLoaderFactory("dummyy")
with pytest.raises(expected):
loader_factory.create_from_format_name(format_name)
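
For context on what the factory under test produces: each loader created from a format name parses the text handed to the factory and yields table objects from load(). The "dummy"/"dummyy" strings in the tests are throwaway source text; create_from_format_name never parses them. A minimal usage sketch following pytablereader's loader interface (treat the details as illustrative rather than exact):

import pytablereader as ptr

factory = ptr.factory.TableTextLoaderFactory("a,b\n1,2\n3,4\n")
loader = factory.create_from_format_name("csv")
for table_data in loader.load():
    print(table_data)  # a tabledata.TableData instance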
|
|
b81540972a24ee3146c6035ebeaacb7fbc785f13
|
dev/docs/build_api_docs.py
|
dev/docs/build_api_docs.py
|
# Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool to generate external api_docs for FQE (Shameless copy from TFQ)."""
import os
from absl import app
from absl import flags
from tensorflow_docs.api_generator import doc_controls
from tensorflow_docs.api_generator import generate_lib
from tensorflow_docs.api_generator import public_api
import fqe
flags.DEFINE_string("output_dir", "/tmp/openfermion_api",
"Where to output the docs")
flags.DEFINE_string("code_url_prefix",
("https://github.com/quantumlib/OpenFermion-FQE/tree/master/src"
"fqe"), "The url prefix for links to code.")
flags.DEFINE_bool("search_hints", True,
"Include metadata search hints in the generated files")
flags.DEFINE_string("site_path", "quark/openfermion_fqe/api_docs/python",
"Path prefix in the _toc.yaml")
FLAGS = flags.FLAGS
def main(unused_argv):
doc_generator = generate_lib.DocGenerator(
root_title="OpenFermion-FQE",
py_modules=[("fqe", fqe)],
base_dir=os.path.dirname(fqe.__file__),
code_url_prefix=FLAGS.code_url_prefix,
search_hints=FLAGS.search_hints,
site_path=FLAGS.site_path,
callbacks=[public_api.local_definitions_filter],
)
doc_generator.build(output_dir=FLAGS.output_dir)
if __name__ == "__main__":
app.run(main)
|
Add script to build API docs.
|
Add script to build API docs.
|
Python
|
apache-2.0
|
quantumlib/OpenFermion-FQE,quantumlib/OpenFermion-FQE,quantumlib/OpenFermion-FQE
|
Add script to build API docs.
|
# Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool to generate external api_docs for FQE (Shameless copy from TFQ)."""
import os
from absl import app
from absl import flags
from tensorflow_docs.api_generator import doc_controls
from tensorflow_docs.api_generator import generate_lib
from tensorflow_docs.api_generator import public_api
import fqe
flags.DEFINE_string("output_dir", "/tmp/openfermion_api",
"Where to output the docs")
flags.DEFINE_string("code_url_prefix",
("https://github.com/quantumlib/OpenFermion-FQE/tree/master/src"
"fqe"), "The url prefix for links to code.")
flags.DEFINE_bool("search_hints", True,
"Include metadata search hints in the generated files")
flags.DEFINE_string("site_path", "quark/openfermion_fqe/api_docs/python",
"Path prefix in the _toc.yaml")
FLAGS = flags.FLAGS
def main(unused_argv):
doc_generator = generate_lib.DocGenerator(
root_title="OpenFermion-FQE",
py_modules=[("fqe", fqe)],
base_dir=os.path.dirname(fqe.__file__),
code_url_prefix=FLAGS.code_url_prefix,
search_hints=FLAGS.search_hints,
site_path=FLAGS.site_path,
callbacks=[public_api.local_definitions_filter],
)
doc_generator.build(output_dir=FLAGS.output_dir)
if __name__ == "__main__":
app.run(main)
|
<commit_before><commit_msg>Add script to build API docs.<commit_after>
|
# Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool to generate external api_docs for FQE (Shameless copy from TFQ)."""
import os
from absl import app
from absl import flags
from tensorflow_docs.api_generator import doc_controls
from tensorflow_docs.api_generator import generate_lib
from tensorflow_docs.api_generator import public_api
import fqe
flags.DEFINE_string("output_dir", "/tmp/openfermion_api",
"Where to output the docs")
flags.DEFINE_string("code_url_prefix",
("https://github.com/quantumlib/OpenFermion-FQE/tree/master/src"
"fqe"), "The url prefix for links to code.")
flags.DEFINE_bool("search_hints", True,
"Include metadata search hints in the generated files")
flags.DEFINE_string("site_path", "quark/openfermion_fqe/api_docs/python",
"Path prefix in the _toc.yaml")
FLAGS = flags.FLAGS
def main(unused_argv):
doc_generator = generate_lib.DocGenerator(
root_title="OpenFermion-FQE",
py_modules=[("fqe", fqe)],
base_dir=os.path.dirname(fqe.__file__),
code_url_prefix=FLAGS.code_url_prefix,
search_hints=FLAGS.search_hints,
site_path=FLAGS.site_path,
callbacks=[public_api.local_definitions_filter],
)
doc_generator.build(output_dir=FLAGS.output_dir)
if __name__ == "__main__":
app.run(main)
|
Add script to build API docs.# Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool to generate external api_docs for FQE (Shameless copy from TFQ)."""
import os
from absl import app
from absl import flags
from tensorflow_docs.api_generator import doc_controls
from tensorflow_docs.api_generator import generate_lib
from tensorflow_docs.api_generator import public_api
import fqe
flags.DEFINE_string("output_dir", "/tmp/openfermion_api",
"Where to output the docs")
flags.DEFINE_string("code_url_prefix",
("https://github.com/quantumlib/OpenFermion-FQE/tree/master/src"
"fqe"), "The url prefix for links to code.")
flags.DEFINE_bool("search_hints", True,
"Include metadata search hints in the generated files")
flags.DEFINE_string("site_path", "quark/openfermion_fqe/api_docs/python",
"Path prefix in the _toc.yaml")
FLAGS = flags.FLAGS
def main(unused_argv):
doc_generator = generate_lib.DocGenerator(
root_title="OpenFermion-FQE",
py_modules=[("fqe", fqe)],
base_dir=os.path.dirname(fqe.__file__),
code_url_prefix=FLAGS.code_url_prefix,
search_hints=FLAGS.search_hints,
site_path=FLAGS.site_path,
callbacks=[public_api.local_definitions_filter],
)
doc_generator.build(output_dir=FLAGS.output_dir)
if __name__ == "__main__":
app.run(main)
|
<commit_before><commit_msg>Add script to build API docs.<commit_after># Copyright 2021 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tool to generate external api_docs for FQE (Shameless copy from TFQ)."""
import os
from absl import app
from absl import flags
from tensorflow_docs.api_generator import doc_controls
from tensorflow_docs.api_generator import generate_lib
from tensorflow_docs.api_generator import public_api
import fqe
flags.DEFINE_string("output_dir", "/tmp/openfermion_api",
"Where to output the docs")
flags.DEFINE_string("code_url_prefix",
("https://github.com/quantumlib/OpenFermion-FQE/tree/master/src"
"fqe"), "The url prefix for links to code.")
flags.DEFINE_bool("search_hints", True,
"Include metadata search hints in the generated files")
flags.DEFINE_string("site_path", "quark/openfermion_fqe/api_docs/python",
"Path prefix in the _toc.yaml")
FLAGS = flags.FLAGS
def main(unused_argv):
doc_generator = generate_lib.DocGenerator(
root_title="OpenFermion-FQE",
py_modules=[("fqe", fqe)],
base_dir=os.path.dirname(fqe.__file__),
code_url_prefix=FLAGS.code_url_prefix,
search_hints=FLAGS.search_hints,
site_path=FLAGS.site_path,
callbacks=[public_api.local_definitions_filter],
)
doc_generator.build(output_dir=FLAGS.output_dir)
if __name__ == "__main__":
app.run(main)
|
|
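Aside: the absl flag pattern in the record above can be smoke-tested without going through app.run(main), because a FlagValues object is callable and parses an argv list directly. A minimal illustrative sketch; the flag name mirrors the record, while the program name and override value are made up.

from absl import flags

FLAGS = flags.FLAGS
flags.DEFINE_string("output_dir", "/tmp/openfermion_api", "Where to output the docs")

# Parsing a synthetic argv marks the flags as parsed, so FLAGS.output_dir is readable.
FLAGS(["docs_builder", "--output_dir=/tmp/api_docs"])
print(FLAGS.output_dir)  # -> /tmp/api_docs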
f8c597e5318501ad84a0332a62c02a446685dd26
|
unyt/tests/test_unyt_testing.py
|
unyt/tests/test_unyt_testing.py
|
"""
Test unyt.testing module that contains utilities for writing tests.
"""
# ----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
import pytest
from unyt.array import unyt_array, unyt_quantity
from unyt.testing import assert_allclose_units
from unyt.exceptions import UnitConversionError
def test_equality():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
assert_allclose_units(a1, a2)
def test_unequal_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([4.0, 5.0, 6.0], "cm")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_conversion_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "kg")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_runtime_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(RuntimeError):
assert_allclose_units(a1, a2, rtol=unyt_quantity(1e-7, "cm"))
def test_operation_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(UnitConversionError):
assert_allclose_units(a1, a2, atol=unyt_quantity(0.0, "kg"))
# from unyt import assert_allclose_units, m
# actual = [1e-5, 1e-3, 1e-1] * m
# desired = actual.to("cm")
# assert_allclose_units(actual, desired)
|
Add tests for unyt.testing module
|
Add tests for unyt.testing module
|
Python
|
bsd-3-clause
|
yt-project/unyt
|
Add tests for unyt.testing module
|
"""
Test unyt.testing module that contains utilities for writing tests.
"""
# ----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
import pytest
from unyt.array import unyt_array, unyt_quantity
from unyt.testing import assert_allclose_units
from unyt.exceptions import UnitConversionError
def test_equality():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
assert_allclose_units(a1, a2)
def test_unequal_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([4.0, 5.0, 6.0], "cm")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_conversion_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "kg")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_runtime_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(RuntimeError):
assert_allclose_units(a1, a2, rtol=unyt_quantity(1e-7, "cm"))
def test_operation_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(UnitConversionError):
assert_allclose_units(a1, a2, atol=unyt_quantity(0.0, "kg"))
# from unyt import assert_allclose_units, m
# actual = [1e-5, 1e-3, 1e-1] * m
# desired = actual.to("cm")
# assert_allclose_units(actual, desired)
|
<commit_before><commit_msg>Add tests for unyt.testing module<commit_after>
|
"""
Test unyt.testing module that contains utilities for writing tests.
"""
# ----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
import pytest
from unyt.array import unyt_array, unyt_quantity
from unyt.testing import assert_allclose_units
from unyt.exceptions import UnitConversionError
def test_equality():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
assert_allclose_units(a1, a2)
def test_unequal_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([4.0, 5.0, 6.0], "cm")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_conversion_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "kg")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_runtime_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(RuntimeError):
assert_allclose_units(a1, a2, rtol=unyt_quantity(1e-7, "cm"))
def test_operation_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(UnitConversionError):
assert_allclose_units(a1, a2, atol=unyt_quantity(0.0, "kg"))
# from unyt import assert_allclose_units, m
# actual = [1e-5, 1e-3, 1e-1] * m
# desired = actual.to("cm")
# assert_allclose_units(actual, desired)
|
Add tests for unyt.testing module"""
Test unyt.testing module that contains utilities for writing tests.
"""
# ----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
import pytest
from unyt.array import unyt_array, unyt_quantity
from unyt.testing import assert_allclose_units
from unyt.exceptions import UnitConversionError
def test_equality():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
assert_allclose_units(a1, a2)
def test_unequal_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([4.0, 5.0, 6.0], "cm")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_conversion_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "kg")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_runtime_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(RuntimeError):
assert_allclose_units(a1, a2, rtol=unyt_quantity(1e-7, "cm"))
def test_operation_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(UnitConversionError):
assert_allclose_units(a1, a2, atol=unyt_quantity(0.0, "kg"))
# from unyt import assert_allclose_units, m
# actual = [1e-5, 1e-3, 1e-1] * m
# desired = actual.to("cm")
# assert_allclose_units(actual, desired)
|
<commit_before><commit_msg>Add tests for unyt.testing module<commit_after>"""
Test unyt.testing module that contains utilities for writing tests.
"""
# ----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
import pytest
from unyt.array import unyt_array, unyt_quantity
from unyt.testing import assert_allclose_units
from unyt.exceptions import UnitConversionError
def test_equality():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
assert_allclose_units(a1, a2)
def test_unequal_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([4.0, 5.0, 6.0], "cm")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_conversion_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "kg")
with pytest.raises(AssertionError):
assert_allclose_units(a1, a2)
def test_runtime_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(RuntimeError):
assert_allclose_units(a1, a2, rtol=unyt_quantity(1e-7, "cm"))
def test_operation_error():
a1 = unyt_array([1.0, 2.0, 3.0], "cm")
a2 = unyt_array([1.0, 2.0, 3.0], "cm")
with pytest.raises(UnitConversionError):
assert_allclose_units(a1, a2, atol=unyt_quantity(0.0, "kg"))
# from unyt import assert_allclose_units, m
# actual = [1e-5, 1e-3, 1e-1] * m
# desired = actual.to("cm")
# assert_allclose_units(actual, desired)
|
|
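Aside: the behaviour those tests pin down can be approximated in a few lines. This is a rough sketch of the idea behind assert_allclose_units, not the library's actual implementation: convert one operand into the other's units (.to() raises UnitConversionError on incompatible dimensions, as test_operation_error expects) and then compare numerically.

import numpy as np
from unyt.array import unyt_array

def allclose_units_sketch(actual, desired, rtol=1e-7, atol=0.0):
    # .to() raises UnitConversionError when dimensions are incompatible.
    desired_in_actual_units = desired.to(actual.units)
    # .d exposes the underlying ndarray without units.
    return np.allclose(actual.d, desired_in_actual_units.d, rtol=rtol, atol=atol)

assert allclose_units_sketch(unyt_array([1.0, 2.0], "cm"), unyt_array([0.01, 0.02], "m"))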
26e9ccea2d7970a1ed9c18f61c03f09e3629cc6c
|
gaphor/UML/interactions/tests/test_interactionspropertypages.py
|
gaphor/UML/interactions/tests/test_interactionspropertypages.py
|
from gaphor import UML
from gaphor.diagram.tests.fixtures import find
from gaphor.UML.interactions.interactionspropertypages import MessagePropertyPage
def test_message_property_page(diagram, element_factory):
item = diagram.create(
UML.interactions.MessageItem, subject=element_factory.create(UML.Message)
)
property_page = MessagePropertyPage(item)
widget = property_page.construct()
message_combo = find(widget, "message-combo")
message_combo.set_active(2)
assert item.subject.messageSort == "asynchSignal"
|
Add tests for message property page
|
Add tests for message property page
|
Python
|
lgpl-2.1
|
amolenaar/gaphor,amolenaar/gaphor
|
Add tests for message property page
|
from gaphor import UML
from gaphor.diagram.tests.fixtures import find
from gaphor.UML.interactions.interactionspropertypages import MessagePropertyPage
def test_message_property_page(diagram, element_factory):
item = diagram.create(
UML.interactions.MessageItem, subject=element_factory.create(UML.Message)
)
property_page = MessagePropertyPage(item)
widget = property_page.construct()
message_combo = find(widget, "message-combo")
message_combo.set_active(2)
assert item.subject.messageSort == "asynchSignal"
|
<commit_before><commit_msg>Add tests for message property page<commit_after>
|
from gaphor import UML
from gaphor.diagram.tests.fixtures import find
from gaphor.UML.interactions.interactionspropertypages import MessagePropertyPage
def test_message_property_page(diagram, element_factory):
item = diagram.create(
UML.interactions.MessageItem, subject=element_factory.create(UML.Message)
)
property_page = MessagePropertyPage(item)
widget = property_page.construct()
message_combo = find(widget, "message-combo")
message_combo.set_active(2)
assert item.subject.messageSort == "asynchSignal"
|
Add tests for message property pagefrom gaphor import UML
from gaphor.diagram.tests.fixtures import find
from gaphor.UML.interactions.interactionspropertypages import MessagePropertyPage
def test_message_property_page(diagram, element_factory):
item = diagram.create(
UML.interactions.MessageItem, subject=element_factory.create(UML.Message)
)
property_page = MessagePropertyPage(item)
widget = property_page.construct()
message_combo = find(widget, "message-combo")
message_combo.set_active(2)
assert item.subject.messageSort == "asynchSignal"
|
<commit_before><commit_msg>Add tests for message property page<commit_after>from gaphor import UML
from gaphor.diagram.tests.fixtures import find
from gaphor.UML.interactions.interactionspropertypages import MessagePropertyPage
def test_message_property_page(diagram, element_factory):
item = diagram.create(
UML.interactions.MessageItem, subject=element_factory.create(UML.Message)
)
property_page = MessagePropertyPage(item)
widget = property_page.construct()
message_combo = find(widget, "message-combo")
message_combo.set_active(2)
assert item.subject.messageSort == "asynchSignal"
|
|
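Aside: the magic number in set_active(2) maps onto UML's MessageSort enumeration. A hypothetical sketch of the ordering the test appears to rely on; the real property page may populate its combo model differently.

# UML 2.x MessageSort literals, in specification order.
MESSAGE_SORTS = ["synchCall", "asynchCall", "asynchSignal",
                 "createMessage", "deleteMessage", "reply"]
assert MESSAGE_SORTS[2] == "asynchSignal"  # index 2, as chosen by set_active(2)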
45d1dd90b1794897d72b25d9090bed785387560f
|
spacy/tests/doc/test_pickle_doc.py
|
spacy/tests/doc/test_pickle_doc.py
|
from __future__ import unicode_literals
import pickle
from ...language import Language
def test_pickle_single_doc():
nlp = Language()
doc = nlp(u'pickle roundtrip')
data = pickle.dumps(doc, 1)
doc2 = pickle.loads(data)
assert doc2.text == 'pickle roundtrip'
def test_list_of_docs_pickles_efficiently():
nlp = Language()
one_pickled = pickle.dumps(nlp(u'0'), -1)
docs = list(nlp.pipe(str(i) for i in range(100)))
many_pickled = pickle.dumps(docs, -1)
assert len(many_pickled) < (len(one_pickled) * 2)
many_unpickled = pickle.loads(many_pickled)
assert many_unpickled[0].text == '0'
assert many_unpickled[-1].text == '99'
assert len(many_unpickled) == 100
|
Add tests for pickling doc
|
Add tests for pickling doc
|
Python
|
mit
|
explosion/spaCy,aikramer2/spaCy,explosion/spaCy,aikramer2/spaCy,aikramer2/spaCy,recognai/spaCy,spacy-io/spaCy,honnibal/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,spacy-io/spaCy
|
Add tests for pickling doc
|
from __future__ import unicode_literals
import pickle
from ...language import Language
def test_pickle_single_doc():
nlp = Language()
doc = nlp(u'pickle roundtrip')
data = pickle.dumps(doc, 1)
doc2 = pickle.loads(data)
assert doc2.text == 'pickle roundtrip'
def test_list_of_docs_pickles_efficiently():
nlp = Language()
one_pickled = pickle.dumps(nlp(u'0'), -1)
docs = list(nlp.pipe(str(i) for i in range(100)))
many_pickled = pickle.dumps(docs, -1)
assert len(many_pickled) < (len(one_pickled) * 2)
many_unpickled = pickle.loads(many_pickled)
assert many_unpickled[0].text == '0'
assert many_unpickled[-1].text == '99'
assert len(many_unpickled) == 100
|
<commit_before><commit_msg>Add tests for pickling doc<commit_after>
|
from __future__ import unicode_literals
import pickle
from ...language import Language
def test_pickle_single_doc():
nlp = Language()
doc = nlp(u'pickle roundtrip')
data = pickle.dumps(doc, 1)
doc2 = pickle.loads(data)
assert doc2.text == 'pickle roundtrip'
def test_list_of_docs_pickles_efficiently():
nlp = Language()
one_pickled = pickle.dumps(nlp(u'0'), -1)
docs = list(nlp.pipe(str(i) for i in range(100)))
many_pickled = pickle.dumps(docs, -1)
assert len(many_pickled) < (len(one_pickled) * 2)
many_unpickled = pickle.loads(many_pickled)
assert many_unpickled[0].text == '0'
assert many_unpickled[-1].text == '99'
assert len(many_unpickled) == 100
|
Add tests for pickling docfrom __future__ import unicode_literals
import pickle
from ...language import Language
def test_pickle_single_doc():
nlp = Language()
doc = nlp(u'pickle roundtrip')
data = pickle.dumps(doc, 1)
doc2 = pickle.loads(data)
assert doc2.text == 'pickle roundtrip'
def test_list_of_docs_pickles_efficiently():
nlp = Language()
one_pickled = pickle.dumps(nlp(u'0'), -1)
docs = list(nlp.pipe(str(i) for i in range(100)))
many_pickled = pickle.dumps(docs, -1)
assert len(many_pickled) < (len(one_pickled) * 2)
many_unpickled = pickle.loads(many_pickled)
assert many_unpickled[0].text == '0'
assert many_unpickled[-1].text == '99'
assert len(many_unpickled) == 100
|
<commit_before><commit_msg>Add tests for pickling doc<commit_after>from __future__ import unicode_literals
import pickle
from ...language import Language
def test_pickle_single_doc():
nlp = Language()
doc = nlp(u'pickle roundtrip')
data = pickle.dumps(doc, 1)
doc2 = pickle.loads(data)
assert doc2.text == 'pickle roundtrip'
def test_list_of_docs_pickles_efficiently():
nlp = Language()
one_pickled = pickle.dumps(nlp(u'0'), -1)
docs = list(nlp.pipe(str(i) for i in range(100)))
many_pickled = pickle.dumps(docs, -1)
assert len(many_pickled) < (len(one_pickled) * 2)
many_unpickled = pickle.loads(many_pickled)
assert many_unpickled[0].text == '0'
assert many_unpickled[-1].text == '99'
assert len(many_unpickled) == 100
|
|
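Aside: the size bound in the second test works because every Doc in the list shares one Vocab, and pickle serializes a shared object once, emitting back-references afterwards. A stdlib-only illustration of the same effect, with a plain dict standing in for the Vocab:

import pickle

shared = {"vocab": list(range(10000))}   # stand-in for a large shared Vocab
docs = [("doc%d" % i, shared) for i in range(100)]

one = len(pickle.dumps(docs[:1], -1))
many = len(pickle.dumps(docs, -1))
# 'shared' is serialized once and referenced 99 more times,
# so 'many' grows far more slowly than 100 * 'one'.
assert many < one * 2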
a18da9856a95f7206f2ee49f728b8e8ba5f12814
|
CLI/convSH.py
|
CLI/convSH.py
|
#!/usr/bin/python
import sys, subprocess, os, re
if len(sys.argv) < 3:
print('Incorrect usage')
exit(1)
if not os.path.isfile(sys.argv[1]):
print('File does not exist')
exit(1)
od = subprocess.check_output(['/usr/bin/objdump', '-d', sys.argv[1]])
od = od.split('\n')
code = []
rx = re.compile(r'^ ([0-9a-z]+):(.*)')
for line in od:
res = rx.match(line)
if not res is None:
code.append(res.group(0).split('\t')[1].strip())
shellcode = ' '.join(code)
shellcode = shellcode.replace(' ', '\\x')
shellcode = '\\x' + shellcode
with open(sys.argv[2], 'w') as codeFile:
codeFile.write('#include <stdio.h>\n\n')
codeFile.write('char code[] = "' + shellcode + '";\n\n')
codeFile.write('int main() {\n')
codeFile.write(' int (*func)();\n')
codeFile.write(' func = (int (*)()) code;\n')
codeFile.write(' (int)(*func)();\n')
codeFile.write(' return 0;\n')
codeFile.write('}\n')
|
Add convert to shellcode tool
|
Add convert to shellcode tool
|
Python
|
mit
|
reykjalin/tools,reykjalin/tools
|
Add convert to shellcode tool
|
#!/usr/bin/python
import sys, subprocess, os, re
if len(sys.argv) < 3:
print('Incorrect usage')
exit(1)
if not os.path.isfile(sys.argv[1]):
print('File does not exist')
exit(1)
od = subprocess.check_output(['/usr/bin/objdump', '-d', sys.argv[1]])
od = od.split('\n')
code = []
rx = re.compile(r'^ ([0-9a-z]+):(.*)')
for line in od:
res = rx.match(line)
if res is not None:
code.append(res.group(0).split('\t')[1].strip())
shellcode = ' '.join(code)
shellcode = shellcode.replace(' ', '\\x')
shellcode = '\\x' + shellcode
with open(sys.argv[2], 'w') as codeFile:
codeFile.write('#include <stdio.h>\n\n')
codeFile.write('char code[] = "' + shellcode + '";\n\n')
codeFile.write('int main() {\n')
codeFile.write(' int (*func)();\n')
codeFile.write(' func = (int (*)()) code;\n')
codeFile.write(' (int)(*func)();\n')
codeFile.write(' return 0;\n')
codeFile.write('}\n')
|
<commit_before><commit_msg>Add convert to shellcode tool<commit_after>
|
#!/usr/bin/python
import sys, subprocess, os, re
if len(sys.argv) < 3:
print('Incorrect usage')
exit(1)
if not os.path.isfile(sys.argv[1]):
print('File does not exist')
exit(1)
od = subprocess.check_output(['/usr/bin/objdump', '-d', sys.argv[1]])
od = od.split('\n')
code = []
rx = re.compile(r'^ ([0-9a-z]+):(.*)')
for line in od:
res = rx.match(line)
if res is not None:
code.append(res.group(0).split('\t')[1].strip())
shellcode = ' '.join(code)
shellcode = shellcode.replace(' ', '\\x')
shellcode = '\\x' + shellcode
with open(sys.argv[2], 'w') as codeFile:
codeFile.write('#include <stdio.h>\n\n')
codeFile.write('char code[] = "' + shellcode + '";\n\n')
codeFile.write('int main() {\n')
codeFile.write(' int (*func)();\n')
codeFile.write(' func = (int (*)()) code;\n')
codeFile.write(' (int)(*func)();\n')
codeFile.write(' return 0;\n')
codeFile.write('}\n')
|
Add convert to shellcode tool#!/usr/bin/python
import sys, subprocess, os, re
if len(sys.argv) < 3:
print('Incorrect usage')
exit(1)
if not os.path.isfile(sys.argv[1]):
print('File does not exist')
exit(1)
od = subprocess.check_output(['/usr/bin/objdump', '-d', sys.argv[1]])
od = od.split('\n')
code = []
rx = re.compile(r'^ ([0-9a-z]+):(.*)')
for line in od:
res = rx.match(line)
if res is not None:
code.append(res.group(0).split('\t')[1].strip())
shellcode = ' '.join(code)
shellcode = shellcode.replace(' ', '\\x')
shellcode = '\\x' + shellcode
with open(sys.argv[2], 'w') as codeFile:
codeFile.write('#include <stdio.h>\n\n')
codeFile.write('char code[] = "' + shellcode + '";\n\n')
codeFile.write('int main() {\n')
codeFile.write(' int (*func)();\n')
codeFile.write(' func = (int (*)()) code;\n')
codeFile.write(' (int)(*func)();\n')
codeFile.write(' return 0;\n')
codeFile.write('}\n')
|
<commit_before><commit_msg>Add convert to shellcode tool<commit_after>#!/usr/bin/python
import sys, subprocess, os, re
if len(sys.argv) < 3:
print('Incorrect usage')
exit(1)
if not os.path.isfile(sys.argv[1]):
print('File does not exist')
exit(1)
od = subprocess.check_output(['/usr/bin/objdump', '-d', sys.argv[1]])
od = od.split('\n')
code = []
rx = re.compile(r'^ ([0-9a-z]+):(.*)')
for line in od:
res = rx.match(line)
if res is not None:
code.append(res.group(0).split('\t')[1].strip())
shellcode = ' '.join(code)
shellcode = shellcode.replace(' ', '\\x')
shellcode = '\\x' + shellcode
with open(sys.argv[2], 'w') as codeFile:
codeFile.write('#include <stdio.h>\n\n')
codeFile.write('char code[] = "' + shellcode + '";\n\n')
codeFile.write('int main() {\n')
codeFile.write(' int (*func)();\n')
codeFile.write(' func = (int (*)()) code;\n')
codeFile.write(' (int)(*func)();\n')
codeFile.write(' return 0;\n')
codeFile.write('}\n')
|
|
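Aside: the heart of the script is a two-step text transformation: pull the byte column out of each objdump line, then rewrite spaces as \x escapes. An isolated sketch on a single fabricated objdump line:

import re

line = " 4004d6:\t31 c0\txor    %eax,%eax"   # fabricated objdump -d output
res = re.match(r'^ ([0-9a-z]+):(.*)', line)
hex_bytes = res.group(0).split('\t')[1].strip()    # '31 c0'
shellcode = '\\x' + hex_bytes.replace(' ', '\\x')  # '\x31\xc0'
print(shellcode)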
94d808cf8c353a49161ac6b1ff9c40671e82f9bd
|
modules/testing/test_literature_reference.py
|
modules/testing/test_literature_reference.py
|
#!/usr/bin/env python
"""
Created by: Lee Bergstrand (2017)
Description: A simple unittest for testing the literature reference module.
"""
import unittest
from modules.literature_reference import parse_literature_references
class TestLiteratureReference(unittest.TestCase):
"""A unit testing class for testing the literature_reference.py module. To be called by nosetests."""
def test_parse_literature_reference(self):
"""Test that literature reference rows can be parsed."""
literature_reference = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly')
]
reference = parse_literature_references(literature_reference)
self.assertEqual(len(reference), 1)
first_reference = reference[0]
self.assertEqual(first_reference.number, 1)
self.assertEqual(first_reference.pubmed_id, 11952905)
self.assertEqual(first_reference.title, 'Identification of genes that are associated with DNA repeats.')
self.assertEqual(first_reference.authors, 'Jansen R, Embden JD, Gaastra W, Schouls LM;')
self.assertEqual(first_reference.citation, 'Mol Microbiol. 2002;43:1565-1575.')
def test_parse_multiple_database_references(self):
"""Test that literature reference rows consisting of multiple references can be parsed."""
literature_references = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats in prokaryotes.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('RN', '[2]'),
('RM', '16292354'),
('RT', 'A guild of 45 CRISPR-associated (Cas) protein families.'),
('RA', 'Haft DH, Selengut J, Mongodin EF, Nelson KE;'),
('RL', 'PLoS Comput Biol. 2005;1:e60.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly Interspaced Short')
]
references = parse_literature_references(literature_references)
self.assertEqual(len(references), 2)
|
Add automated tests for literature reference module.
|
Add automated tests for literature reference module.
|
Python
|
apache-2.0
|
LeeBergstrand/pygenprop
|
Add automated tests for literature reference module.
|
#!/usr/bin/env python
"""
Created by: Lee Bergstrand (2017)
Description: A simple unittest for testing the literature reference module.
"""
import unittest
from modules.literature_reference import parse_literature_references
class TestLiteratureReference(unittest.TestCase):
"""A unit testing class for testing the literature_reference.py module. To be called by nosetests."""
def test_parse_literature_reference(self):
"""Test that literature reference rows can be parsed."""
literature_reference = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly')
]
reference = parse_literature_references(literature_reference)
self.assertEqual(len(reference), 1)
first_reference = reference[0]
self.assertEqual(first_reference.number, 1)
self.assertEqual(first_reference.pubmed_id, 11952905)
self.assertEqual(first_reference.title, 'Identification of genes that are associated with DNA repeats.')
self.assertEqual(first_reference.authors, 'Jansen R, Embden JD, Gaastra W, Schouls LM;')
self.assertEqual(first_reference.citation, 'Mol Microbiol. 2002;43:1565-1575.')
def test_parse_multiple_database_references(self):
"""Test that literature reference rows consisting of multiple references can be parsed."""
literature_references = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats in prokaryotes.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('RN', '[2]'),
('RM', '16292354'),
('RT', 'A guild of 45 CRISPR-associated (Cas) protein families.'),
('RA', 'Haft DH, Selengut J, Mongodin EF, Nelson KE;'),
('RL', 'PLoS Comput Biol. 2005;1:e60.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly Interspaced Short')
]
references = parse_literature_references(literature_references)
self.assertEqual(len(references), 2)
|
<commit_before><commit_msg>Add automated tests for literature reference module.<commit_after>
|
#!/usr/bin/env python
"""
Created by: Lee Bergstrand (2017)
Description: A simple unittest for testing the literature reference module.
"""
import unittest
from modules.literature_reference import parse_literature_references
class TestLiteratureReference(unittest.TestCase):
"""A unit testing class for testing the literature_reference.py module. To be called by nosetests."""
def test_parse_literature_reference(self):
"""Test that literature reference rows can be parsed."""
literature_reference = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly')
]
reference = parse_literature_references(literature_reference)
self.assertEqual(len(reference), 1)
first_reference = reference[0]
self.assertEqual(first_reference.number, 1)
self.assertEqual(first_reference.pubmed_id, 11952905)
self.assertEqual(first_reference.title, 'Identification of genes that are associated with DNA repeats.')
self.assertEqual(first_reference.authors, 'Jansen R, Embden JD, Gaastra W, Schouls LM;')
self.assertEqual(first_reference.citation, 'Mol Microbiol. 2002;43:1565-1575.')
def test_parse_multiple_database_references(self):
"""Test that literature reference rows consisting of multiple references can be parsed."""
literature_references = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats in prokaryotes.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('RN', '[2]'),
('RM', '16292354'),
('RT', 'A guild of 45 CRISPR-associated (Cas) protein families.'),
('RA', 'Haft DH, Selengut J, Mongodin EF, Nelson KE;'),
('RL', 'PLoS Comput Biol. 2005;1:e60.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly Interspaced Short')
]
references = parse_literature_references(literature_references)
self.assertEqual(len(references), 2)
|
Add automated tests for literature reference module.#!/usr/bin/env python
"""
Created by: Lee Bergstrand (2017)
Description: A simple unittest for testing the literature reference module.
"""
import unittest
from modules.literature_reference import parse_literature_references
class TestLiteratureReference(unittest.TestCase):
"""A unit testing class for testing the literature_reference.py module. To be called by nosetests."""
def test_parse_literature_reference(self):
"""Test that literature reference rows can be parsed."""
literature_reference = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly')
]
reference = parse_literature_references(literature_reference)
self.assertEqual(len(reference), 1)
first_reference = reference[0]
self.assertEqual(first_reference.number, 1)
self.assertEqual(first_reference.pubmed_id, 11952905)
self.assertEqual(first_reference.title, 'Identification of genes that are associated with DNA repeats.')
self.assertEqual(first_reference.authors, 'Jansen R, Embden JD, Gaastra W, Schouls LM;')
self.assertEqual(first_reference.citation, 'Mol Microbiol. 2002;43:1565-1575.')
def test_parse_multiple_database_references(self):
"""Test that literature reference rows consisting of multiple references can be parsed."""
literature_references = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats in prokaryotes.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('RN', '[2]'),
('RM', '16292354'),
('RT', 'A guild of 45 CRISPR-associated (Cas) protein families.'),
('RA', 'Haft DH, Selengut J, Mongodin EF, Nelson KE;'),
('RL', 'PLoS Comput Biol. 2005;1:e60.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly Interspaced Short')
]
references = parse_literature_references(literature_references)
self.assertEqual(len(references), 2)
|
<commit_before><commit_msg>Add automated tests for literature reference module.<commit_after>#!/usr/bin/env python
"""
Created by: Lee Bergstrand (2017)
Description: A simple unittest for testing the literature reference module.
"""
import unittest
from modules.literature_reference import parse_literature_references
class TestLiteratureReference(unittest.TestCase):
"""A unit testing class for testing the literature_reference.py module. To be called by nosetests."""
def test_parse_literature_reference(self):
"""Test that literature reference rows can be parsed."""
literature_reference = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly')
]
reference = parse_literature_references(literature_reference)
self.assertEqual(len(reference), 1)
first_reference = reference[0]
self.assertEqual(first_reference.number, 1)
self.assertEqual(first_reference.pubmed_id, 11952905)
self.assertEqual(first_reference.title, 'Identification of genes that are associated with DNA repeats.')
self.assertEqual(first_reference.authors, 'Jansen R, Embden JD, Gaastra W, Schouls LM;')
self.assertEqual(first_reference.citation, 'Mol Microbiol. 2002;43:1565-1575.')
def test_parse_multiple_database_references(self):
"""Test that literature reference rows consisting of multiple references can be parsed."""
literature_references = [
('TH', '1'),
('RN', '[1]'),
('RM', '11952905'),
('RT', 'Identification of genes that are associated with DNA repeats in prokaryotes.'),
('RA', 'Jansen R, Embden JD, Gaastra W, Schouls LM;'),
('RL', 'Mol Microbiol. 2002;43:1565-1575.'),
('RN', '[2]'),
('RM', '16292354'),
('RT', 'A guild of 45 CRISPR-associated (Cas) protein families.'),
('RA', 'Haft DH, Selengut J, Mongodin EF, Nelson KE;'),
('RL', 'PLoS Comput Biol. 2005;1:e60.'),
('CC', 'CRISPR repeats are by definition Clustered Regularly Interspaced Short')
]
references = parse_literature_references(literature_references)
self.assertEqual(len(references), 2)
|
|
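Aside: parse_literature_references itself is not shown in the record. A plausible sketch of such a tag/value accumulator, assuming each reference carries all five fields; the real module's data model may differ.

from collections import namedtuple

LiteratureReference = namedtuple(
    'LiteratureReference', ['number', 'pubmed_id', 'title', 'authors', 'citation'])

def parse_refs_sketch(rows):
    refs, current = [], {}
    for tag, value in rows:
        if tag == 'RN':                 # start of a new reference
            if current:
                refs.append(LiteratureReference(**current))
            current = {'number': int(value.strip('[]'))}
        elif tag == 'RM':
            current['pubmed_id'] = int(value)
        elif tag == 'RT':
            current['title'] = value
        elif tag == 'RA':
            current['authors'] = value
        elif tag == 'RL':
            current['citation'] = value
        # 'TH', 'CC' and other tags are ignored, matching the fixtures above.
    if current:
        refs.append(LiteratureReference(**current))
    return refs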
354fcdbe045e80119b1edd4b322589e56df09c39
|
examples/buffer_results.py
|
examples/buffer_results.py
|
"""
Process 20 lines of output at a time.
Example runs:
python buffer_results.py
"""
import sys
import time
import drainers
# fake this
def setup_cruncher():
time.sleep(1)
def do_something_expensive(file):
time.sleep(0.005)
def destroy_cruncher():
time.sleep(0.8)
files = []
def crunch(files):
print 'Setting up cruncher...'
setup_cruncher()
while len(files) > 0:
f = files.pop(0)
print '- Crunching file %s...' % f.strip()
do_something_expensive(f)
print 'Releasing cruncher...'
destroy_cruncher()
def add_to_buffer(line, is_err):
if is_err:
# ignore all errors
return
files.append(line)
# start crunch synchronously after 20 items have been read
if len(files) >= 20:
crunch(files)
d = drainers.Drainer(['find', '.', '-type', 'f'], read_event_cb=add_to_buffer)
d.start()
|
Add result buffering/batch processing example.
|
Add result buffering/batch processing example.
|
Python
|
bsd-3-clause
|
nvie/python-drainers,nvie/python-drainers
|
Add result buffering/batch processing example.
|
"""
Process 20 lines of output at a time.
Example runs:
python buffer_results.py
"""
import sys
import time
import drainers
# fake this
def setup_cruncher():
time.sleep(1)
def do_something_expensive(file):
time.sleep(0.005)
def destroy_cruncher():
time.sleep(0.8)
files = []
def crunch(files):
print 'Setting up cruncher...'
setup_cruncher()
while len(files) > 0:
f = files.pop(0)
print '- Crunching file %s...' % f.strip()
do_something_expensive(f)
print 'Releasing cruncher...'
destroy_cruncher()
def add_to_buffer(line, is_err):
if is_err:
# ignore all errors
return
files.append(line)
# start crunch synchronously after 20 items have been read
if len(files) >= 20:
crunch(files)
d = drainers.Drainer(['find', '.', '-type', 'f'], read_event_cb=add_to_buffer)
d.start()
|
<commit_before><commit_msg>Add result buffering/batch processing example.<commit_after>
|
"""
Process 20 lines of output at a time.
Example runs:
python buffer_results.py
"""
import sys
import time
import drainers
# fake this
def setup_cruncher():
time.sleep(1)
def do_something_expensive(file):
time.sleep(0.005)
def destroy_cruncher():
time.sleep(0.8)
files = []
def crunch(files):
print 'Setting up cruncher...'
setup_cruncher()
while len(files) > 0:
f = files.pop(0)
print '- Crunching file %s...' % f.strip()
do_something_expensive(f)
print 'Releasing cruncher...'
destroy_cruncher()
def add_to_buffer(line, is_err):
if is_err:
# ignore all errors
return
files.append(line)
# start crunch synchronously after 20 items have been read
if len(files) >= 20:
crunch(files)
d = drainers.Drainer(['find', '.', '-type', 'f'], read_event_cb=add_to_buffer)
d.start()
|
Add result buffering/batch processing example."""
Process 20 lines of output at a time.
Example runs:
python buffer_results.py
"""
import sys
import time
import drainers
# fake this
def setup_cruncher():
time.sleep(1)
def do_something_expensive(file):
time.sleep(0.005)
def destroy_cruncher():
time.sleep(0.8)
files = []
def crunch(files):
print 'Setting up cruncher...'
setup_cruncher()
while len(files) > 0:
f = files.pop(0)
print '- Crunching file %s...' % f.strip()
do_something_expensive(f)
print 'Releasing cruncher...'
destroy_cruncher()
def add_to_buffer(line, is_err):
if is_err:
# ignore all errors
return
files.append(line)
# start crunch synchronously after 20 items have been read
if len(files) >= 20:
crunch(files)
d = drainers.Drainer(['find', '.', '-type', 'f'], read_event_cb=add_to_buffer)
d.start()
|
<commit_before><commit_msg>Add result buffering/batch processing example.<commit_after>"""
Process 20 lines of output at a time.
Example runs:
python buffer_results.py
"""
import sys
import time
import drainers
# fake this
def setup_cruncher():
time.sleep(1)
def do_something_expensive(file):
time.sleep(0.005)
def destroy_cruncher():
time.sleep(0.8)
files = []
def crunch(files):
print 'Setting up cruncher...'
setup_cruncher()
while len(files) > 0:
f = files.pop(0)
print '- Crunching file %s...' % f.strip()
do_something_expensive(f)
print 'Releasing cruncher...'
destroy_cruncher()
def add_to_buffer(line, is_err):
if is_err:
# ignore all errors
return
files.append(line)
# start crunch synchronously after 20 items have been read
if len(files) >= 20:
crunch(files)
d = drainers.Drainer(['find', '.', '-type', 'f'], read_event_cb=add_to_buffer)
d.start()
|
|
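Aside: the same buffer-and-flush pattern, minus the drainers dependency and the Python 2 prints, in a self-contained Python 3 form (names here are illustrative):

buffered = []

def add_to_buffer(line, is_err, batch_size=20):
    if is_err:
        return                      # ignore stderr lines
    buffered.append(line)
    if len(buffered) >= batch_size:
        flush(buffered)

def flush(items):
    while items:
        print('- processing %s' % items.pop(0).strip())

for i in range(45):                 # 45 lines -> two full batches + 5 left over
    add_to_buffer('file-%d\n' % i, False)
flush(buffered)                     # drain the remainder at the end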
61af01f79158dffeb9e4649cbaa98a9cb77cfd99
|
examples/example01.py
|
examples/example01.py
|
from strip_recipes import RecipeFile
recipe = RecipeFile()
recipe.record_start('example01')
recipe.sbs_on()
for cur_gate in (10.0, 20.0, 50.0):
recipe.bias_set('HA1_Vg', cur_gate)
recipe.wait(10.0)
recipe.sbs_off()
recipe.record_stop()
with open('out.txt', 'wt') as fout:
recipe.write_to_file(fout)
|
Add a new, simpler example
|
Add a new, simpler example
|
Python
|
mit
|
lspestrip/strip_recipes
|
Add a new, simpler example
|
from strip_recipes import RecipeFile
recipe = RecipeFile()
recipe.record_start('example01')
recipe.sbs_on()
for cur_gate in (10.0, 20.0, 50.0):
recipe.bias_set('HA1_Vg', cur_gate)
recipe.wait(10.0)
recipe.sbs_off()
recipe.record_stop()
with open('out.txt', 'wt') as fout:
recipe.write_to_file(fout)
|
<commit_before><commit_msg>Add a new, simpler example<commit_after>
|
from strip_recipes import RecipeFile
recipe = RecipeFile()
recipe.record_start('example01')
recipe.sbs_on()
for cur_gate in (10.0, 20.0, 50.0):
recipe.bias_set('HA1_Vg', cur_gate)
recipe.wait(10.0)
recipe.sbs_off()
recipe.record_stop()
with open('out.txt', 'wt') as fout:
recipe.write_to_file(fout)
|
Add a new, simpler examplefrom strip_recipes import RecipeFile
recipe = RecipeFile()
recipe.record_start('example01')
recipe.sbs_on()
for cur_gate in (10.0, 20.0, 50.0):
recipe.bias_set('HA1_Vg', cur_gate)
recipe.wait(10.0)
recipe.sbs_off()
recipe.record_stop()
with open('out.txt', 'wt') as fout:
recipe.write_to_file(fout)
|
<commit_before><commit_msg>Add a new, simpler example<commit_after>from strip_recipes import RecipeFile
recipe = RecipeFile()
recipe.record_start('example01')
recipe.sbs_on()
for cur_gate in (10.0, 20.0, 50.0):
recipe.bias_set('HA1_Vg', cur_gate)
recipe.wait(10.0)
recipe.sbs_off()
recipe.record_stop()
with open('out.txt', 'wt') as fout:
recipe.write_to_file(fout)
|
|
9d91e0dd2d6175fe1a45d1a556a33c08133185b3
|
cc/license/tests/test_rdfa.py
|
cc/license/tests/test_rdfa.py
|
"""Unit tests utilizing RDFa parsing. Primarily cc.license.formatter."""
import cc.license
import rdfadict
import rdflib
class TestHtmlFormatter:
def __init__(self):
self.parser = rdfadict.RdfaParser()
self.base = 'FOOBAR'
self.lic = cc.license.by_code('by')
self.fmtr = cc.license.formatters.HTML
# define namespaces
self.w3 = rdflib.Namespace('http://www.w3.org/1999/xhtml/vocab#')
def parse(self, rdfa_string):
return self.parser.parse_string(rdfa_string, self.base)
def test_basic(self):
r = self.fmtr.format(self.lic)
trips = self.parse(r)
assert self.lic.uri in trips[self.base][str(self.w3.license)]
|
Create initial RDFa tests as their own top-level test module.
|
Create initial RDFa tests as their own top-level test module.
|
Python
|
mit
|
creativecommons/cc.license,creativecommons/cc.license
|
Create initial RDFa tests as their own top-level test module.
|
"""Unit tests utilizing RDFa parsing. Primarily cc.license.formatter."""
import cc.license
import rdfadict
import rdflib
class TestHtmlFormatter:
def __init__(self):
self.parser = rdfadict.RdfaParser()
self.base = 'FOOBAR'
self.lic = cc.license.by_code('by')
self.fmtr = cc.license.formatters.HTML
# define namespaces
self.w3 = rdflib.Namespace('http://www.w3.org/1999/xhtml/vocab#')
def parse(self, rdfa_string):
return self.parser.parse_string(rdfa_string, self.base)
def test_basic(self):
r = self.fmtr.format(self.lic)
trips = self.parse(r)
assert self.lic.uri in trips[self.base][str(self.w3.license)]
|
<commit_before><commit_msg>Create initial RDFa tests as their own top-level test module.<commit_after>
|
"""Unit tests utilizing RDFa parsing. Primarily cc.license.formatter."""
import cc.license
import rdfadict
import rdflib
class TestHtmlFormatter:
def __init__(self):
self.parser = rdfadict.RdfaParser()
self.base = 'FOOBAR'
self.lic = cc.license.by_code('by')
self.fmtr = cc.license.formatters.HTML
# define namespaces
self.w3 = rdflib.Namespace('http://www.w3.org/1999/xhtml/vocab#')
def parse(self, rdfa_string):
return self.parser.parse_string(rdfa_string, self.base)
def test_basic(self):
r = self.fmtr.format(self.lic)
trips = self.parse(r)
assert self.lic.uri in trips[self.base][str(self.w3.license)]
|
Create initial RDFa tests as their own top-level test module."""Unit tests utilizing RDFa parsing. Primarily cc.license.formatter."""
import cc.license
import rdfadict
import rdflib
class TestHtmlFormatter:
def __init__(self):
self.parser = rdfadict.RdfaParser()
self.base = 'FOOBAR'
self.lic = cc.license.by_code('by')
self.fmtr = cc.license.formatters.HTML
# define namespaces
self.w3 = rdflib.Namespace('http://www.w3.org/1999/xhtml/vocab#')
def parse(self, rdfa_string):
return self.parser.parse_string(rdfa_string, self.base)
def test_basic(self):
r = self.fmtr.format(self.lic)
trips = self.parse(r)
assert self.lic.uri in trips[self.base][str(self.w3.license)]
|
<commit_before><commit_msg>Create initial RDFa tests as their own top-level test module.<commit_after>"""Unit tests utilizing RDFa parsing. Primarily cc.license.formatter."""
import cc.license
import rdfadict
import rdflib
class TestHtmlFormatter:
def __init__(self):
self.parser = rdfadict.RdfaParser()
self.base = 'FOOBAR'
self.lic = cc.license.by_code('by')
self.fmtr = cc.license.formatters.HTML
# define namespaces
self.w3 = rdflib.Namespace('http://www.w3.org/1999/xhtml/vocab#')
def parse(self, rdfa_string):
return self.parser.parse_string(rdfa_string, self.base)
def test_basic(self):
r = self.fmtr.format(self.lic)
trips = self.parse(r)
assert self.lic.uri in trips[self.base][str(self.w3.license)]
|
|
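Aside: the formatter under test produces the HTML, but the parse/assert pattern works on any RDFa fragment. A standalone sketch reusing only the calls shown in the record, and assuming rdfadict resolves rel="license" to the XHTML vocabulary term the test's namespace points at:

import rdfadict
import rdflib

html = ('<html xmlns="http://www.w3.org/1999/xhtml"><body>'
        '<a rel="license" href="http://creativecommons.org/licenses/by/3.0/">CC BY</a>'
        '</body></html>')
base = 'FOOBAR'
w3 = rdflib.Namespace('http://www.w3.org/1999/xhtml/vocab#')

trips = rdfadict.RdfaParser().parse_string(html, base)
assert 'http://creativecommons.org/licenses/by/3.0/' in trips[base][str(w3.license)]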
61b36218cc0cf74e90ac7ee8d7f02b1ffffe3890
|
blues/wkhtmltopdf.py
|
blues/wkhtmltopdf.py
|
"""
wkhtmltopdf Blueprint
blueprints:
- blues.wkhtmltopdf
"""
from fabric.decorators import task
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
configure()
def install():
with sudo():
packages = ['wkhtmltopdf', 'xvfb', 'xfonts-100dpi', 'xfonts-75dpi', 'xfonts-cyrillic']
debian.apt_get('install', *packages)
@task
def configure():
"""
Configure wkhtmltopdf
"""
destination = '/usr/local/bin/wkhtmltopdf.sh'
blueprint.upload('wkhtmltopdf.sh', destination)
with sudo():
debian.chmod(destination, '+x')
|
"""
wkhtmltopdf Blueprint
.. code-block:: yaml
blueprints:
- blues.wkhtmltopdf
settings:
wkhtmltopdf:
# wkhtmltopdf_version: 0.12.2.1
"""
from fabric.decorators import task
from refabric.context_managers import sudo, settings
from refabric.contrib import blueprints
from refabric.operations import run
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
def install():
"""
Install wkhtmltox from the pkgs on sourceforge that are compiled with
patched QT. This version doesn't need X/Xvfb to run.
"""
# Can't be named version since it'll conflict with fabrics own version variable
wkhtmltox_ver = blueprint.get('wkhtmltopdf_version', '0.12.2.1')
wkhtmltox_pkg = 'wkhtmltox-{}_linux-{}-amd64.deb'.format(
wkhtmltox_ver, debian.lbs_codename())
wkhtmltox_url = 'http://downloads.sourceforge.net/project/wkhtmltopdf/{}/{}'.format(
wkhtmltox_ver, wkhtmltox_pkg)
run('curl --silent --location --show-error --remote-name "{}"'.format(
wkhtmltox_url))
with sudo():
with settings(warn_only=True):
run('dpkg -i {}'.format(wkhtmltox_pkg))
debian.apt_get('--fix-broken', 'install')
debian.rm(wkhtmltox_pkg)
@task
def configure():
"""
Configure wkhtmltopdf
"""
pass
|
Install wkhtmltox from the pkgs on sourceforge
|
Install wkhtmltox from the pkgs on sourceforge
They're compiled with patched QT. This version doesn't need X/Xvfb to run.
|
Python
|
mit
|
gelbander/blues,andreif/blues,chrippa/blues,5monkeys/blues,adisbladis/blues,5monkeys/blues,Sportamore/blues,adisbladis/blues,andreif/blues,gelbander/blues,chrippa/blues,Sportamore/blues,jocke-l/blues,jocke-l/blues,andreif/blues,Sportamore/blues,adisbladis/blues,gelbander/blues,chrippa/blues,5monkeys/blues,jocke-l/blues
|
"""
wkhtmltopdf Blueprint
blueprints:
- blues.wkhtmltopdf
"""
from fabric.decorators import task
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
configure()
def install():
with sudo():
packages = ['wkhtmltopdf', 'xvfb', 'xfonts-100dpi', 'xfonts-75dpi', 'xfonts-cyrillic']
debian.apt_get('install', *packages)
@task
def configure():
"""
Configure wkhtmltopdf
"""
destination = '/usr/local/bin/wkhtmltopdf.sh'
blueprint.upload('wkhtmltopdf.sh', destination)
with sudo():
debian.chmod(destination, '+x')
Install wkhtmltox from the pkgs on sourceforge
They're compiled with patched QT. This version doesn't need X/Xvfb to run.
|
"""
wkhtmltopdf Blueprint
.. code-block:: yaml
blueprints:
- blues.wkhtmltopdf
settings:
wkhtmltopdf:
# wkhtmltopdf_version: 0.12.2.1
"""
from fabric.decorators import task
from refabric.context_managers import sudo, settings
from refabric.contrib import blueprints
from refabric.operations import run
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
def install():
"""
Install wkhtmltox from the pkgs on sourceforge that are compiled with
patched QT. This version doesn't need X/Xvfb to run.
"""
# Can't be named version since it'll conflict with fabrics own version variable
wkhtmltox_ver = blueprint.get('wkhtmltopdf_version', '0.12.2.1')
wkhtmltox_pkg = 'wkhtmltox-{}_linux-{}-amd64.deb'.format(
wkhtmltox_ver, debian.lbs_codename())
wkhtmltox_url = 'http://downloads.sourceforge.net/project/wkhtmltopdf/{}/{}'.format(
wkhtmltox_ver, wkhtmltox_pkg)
run('curl --silent --location --show-error --remote-name "{}"'.format(
wkhtmltox_url))
with sudo():
with settings(warn_only=True):
run('dpkg -i {}'.format(wkhtmltox_pkg))
debian.apt_get('--fix-broken', 'install')
debian.rm(wkhtmltox_pkg)
@task
def configure():
"""
Configure wkhtmltopdf
"""
pass
|
<commit_before>"""
wkhtmltopdf Blueprint
blueprints:
- blues.wkhtmltopdf
"""
from fabric.decorators import task
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
configure()
def install():
with sudo():
packages = ['wkhtmltopdf', 'xvfb', 'xfonts-100dpi', 'xfonts-75dpi', 'xfonts-cyrillic']
debian.apt_get('install', *packages)
@task
def configure():
"""
Configure wkhtmltopdf
"""
destination = '/usr/local/bin/wkhtmltopdf.sh'
blueprint.upload('wkhtmltopdf.sh', destination)
with sudo():
debian.chmod(destination, '+x')
<commit_msg>Install wkhtmltox from the pkgs on sourceforge
They're compiled with patched QT. This version doesn't need X/Xvfb to run.<commit_after>
|
"""
wkhtmltopdf Blueprint
.. code-block:: yaml
blueprints:
- blues.wkhtmltopdf
settings:
wkhtmltopdf:
# wkhtmltopdf_version: 0.12.2.1
"""
from fabric.decorators import task
from refabric.context_managers import sudo, settings
from refabric.contrib import blueprints
from refabric.operations import run
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
def install():
"""
Install wkhtmltox from the pkgs on sourceforge that are compiled with
patched QT. This version doesn't need X/Xvfb to run.
"""
# Can't be named version since it'll conflict with fabrics own version variable
wkhtmltox_ver = blueprint.get('wkhtmltopdf_version', '0.12.2.1')
wkhtmltox_pkg = 'wkhtmltox-{}_linux-{}-amd64.deb'.format(
wkhtmltox_ver, debian.lbs_codename())
wkhtmltox_url = 'http://downloads.sourceforge.net/project/wkhtmltopdf/{}/{}'.format(
wkhtmltox_ver, wkhtmltox_pkg)
run('curl --silent --location --show-error --remote-name "{}"'.format(
wkhtmltox_url))
with sudo():
with settings(warn_only=True):
run('dpkg -i {}'.format(wkhtmltox_pkg))
debian.apt_get('--fix-broken', 'install')
debian.rm(wkhtmltox_pkg)
@task
def configure():
"""
Configure wkhtmltopdf
"""
pass
|
"""
wkhtmltopdf Blueprint
blueprints:
- blues.wkhtmltopdf
"""
from fabric.decorators import task
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
configure()
def install():
with sudo():
packages = ['wkhtmltopdf', 'xvfb', 'xfonts-100dpi', 'xfonts-75dpi', 'xfonts-cyrillic']
debian.apt_get('install', *packages)
@task
def configure():
"""
Configure wkhtmltopdf
"""
destination = '/usr/local/bin/wkhtmltopdf.sh'
blueprint.upload('wkhtmltopdf.sh', destination)
with sudo():
debian.chmod(destination, '+x')
Install wkhtmltox from the pkgs on sourceforge
They're compiled with patched QT. This version doesn't need X/Xvfb to run."""
wkhtmltopdf Blueprint
.. code-block:: yaml
blueprints:
- blues.wkhtmltopdf
settings:
wkhtmltopdf:
# wkhtmltopdf_version: 0.12.2.1
"""
from fabric.decorators import task
from refabric.context_managers import sudo, settings
from refabric.contrib import blueprints
from refabric.operations import run
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
def install():
"""
Install wkhtmltox from the pkgs on sourceforge that are compiled with
patched QT. This version doesn't need X/Xvfb to run.
"""
# Can't be named version since it'll conflict with fabrics own version variable
wkhtmltox_ver = blueprint.get('wkhtmltopdf_version', '0.12.2.1')
wkhtmltox_pkg = 'wkhtmltox-{}_linux-{}-amd64.deb'.format(
wkhtmltox_ver, debian.lbs_codename())
wkhtmltox_url = 'http://downloads.sourceforge.net/project/wkhtmltopdf/{}/{}'.format(
wkhtmltox_ver, wkhtmltox_pkg)
run('curl --silent --location --show-error --remote-name "{}"'.format(
wkhtmltox_url))
with sudo():
with settings(warn_only=True):
run('dpkg -i {}'.format(wkhtmltox_pkg))
debian.apt_get('--fix-broken', 'install')
debian.rm(wkhtmltox_pkg)
@task
def configure():
"""
Configure wkhtmltopdf
"""
pass
|
<commit_before>"""
wkhtmltopdf Blueprint
blueprints:
- blues.wkhtmltopdf
"""
from fabric.decorators import task
from refabric.context_managers import sudo
from refabric.contrib import blueprints
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
configure()
def install():
with sudo():
packages = ['wkhtmltopdf', 'xvfb', 'xfonts-100dpi', 'xfonts-75dpi', 'xfonts-cyrillic']
debian.apt_get('install', *packages)
@task
def configure():
"""
Configure wkhtmltopdf
"""
destination = '/usr/local/bin/wkhtmltopdf.sh'
blueprint.upload('wkhtmltopdf.sh', destination)
with sudo():
debian.chmod(destination, '+x')
<commit_msg>Install wkhtmltox from the pkgs on sourceforge
They're compiled with patched QT. This version doesn't need X/Xvfb to run.<commit_after>"""
wkhtmltopdf Blueprint
.. code-block:: yaml
blueprints:
- blues.wkhtmltopdf
settings:
wkhtmltopdf:
# wkhtmltopdf_version: 0.12.2.1
"""
from fabric.decorators import task
from refabric.context_managers import sudo, settings
from refabric.contrib import blueprints
from refabric.operations import run
from . import debian
__all__ = ['setup', 'configure']
blueprint = blueprints.get(__name__)
@task
def setup():
"""
Install and configure wkhtmltopdf
"""
install()
def install():
"""
Install wkhtmltox from the pkgs on sourceforge that are compiled with
patched QT. This version doesn't need X/Xvfb to run.
"""
# Can't be named version since it'll conflict with fabrics own version variable
wkhtmltox_ver = blueprint.get('wkhtmltopdf_version', '0.12.2.1')
wkhtmltox_pkg = 'wkhtmltox-{}_linux-{}-amd64.deb'.format(
wkhtmltox_ver, debian.lbs_codename())
wkhtmltox_url = 'http://downloads.sourceforge.net/project/wkhtmltopdf/{}/{}'.format(
wkhtmltox_ver, wkhtmltox_pkg)
run('curl --silent --location --show-error --remote-name "{}"'.format(
wkhtmltox_url))
with sudo():
with settings(warn_only=True):
run('dpkg -i {}'.format(wkhtmltox_pkg))
debian.apt_get('--fix-broken', 'install')
debian.rm(wkhtmltox_pkg)
@task
def configure():
"""
Configure wkhtmltopdf
"""
pass
|
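Aside: the download URL assembled in install() is plain string formatting; isolated here with made-up values standing in for blueprint.get(...) and debian.lbs_codename():

wkhtmltox_ver = '0.12.2.1'
codename = 'trusty'      # stand-in for debian.lbs_codename()
pkg = 'wkhtmltox-{}_linux-{}-amd64.deb'.format(wkhtmltox_ver, codename)
url = ('http://downloads.sourceforge.net/project/wkhtmltopdf/{}/{}'
       .format(wkhtmltox_ver, pkg))
print(url)
# http://downloads.sourceforge.net/project/wkhtmltopdf/0.12.2.1/wkhtmltox-0.12.2.1_linux-trusty-amd64.deb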
68693163fc4bcc7432a9890a37721cac3641ca51
|
livetests.py
|
livetests.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License
#
# Copyright 2018 David Pursehouse. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Live server tests."""
import unittest
from pygerrit2.rest import GerritRestAPI, GerritReview
from pygerrit2.rest.auth import HTTPBasicAuthFromNetrc
url = "http://localhost:8080"
auth = HTTPBasicAuthFromNetrc(url)
api = GerritRestAPI(url=url, auth=auth)
class TestLiveServer(unittest.TestCase):
"""Test that GerritRestAPI behaves properly against a live server."""
def test_live_server(self):
"""Run the tests."""
# Post with content as dict
changeinput = {"project": "test-project",
"subject": "subject",
"branch": "master",
"topic": "topic"}
change = api.post("/changes/", json=changeinput)
id = change["id"]
# Get
api.get("/changes/" + id)
# Put with content as string
api.put("/changes/" + id + "/edit/foo", data="content")
# Put with no content
api.put("/changes/" + id + "/edit/foo")
# Review by API
rev = GerritReview()
rev.set_message("Review from live test")
rev.add_labels({"Code-Review": 1})
api.review(id, "current", rev)
if __name__ == '__main__':
unittest.main()
|
Add basic tests running against a live Gerrit server
|
Add basic tests running against a live Gerrit server
Change-Id: I313a9132d2de7a87b3257af7846febb64b3873ee
|
Python
|
mit
|
dpursehouse/pygerrit2
|
Add basic tests running against a live Gerrit server
Change-Id: I313a9132d2de7a87b3257af7846febb64b3873ee
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License
#
# Copyright 2018 David Pursehouse. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Live server tests."""
import unittest
from pygerrit2.rest import GerritRestAPI, GerritReview
from pygerrit2.rest.auth import HTTPBasicAuthFromNetrc
url = "http://localhost:8080"
auth = HTTPBasicAuthFromNetrc(url)
api = GerritRestAPI(url=url, auth=auth)
class TestLiveServer(unittest.TestCase):
"""Test that GerritRestAPI behaves properly against a live server."""
def test_live_server(self):
"""Run the tests."""
# Post with content as dict
changeinput = {"project": "test-project",
"subject": "subject",
"branch": "master",
"topic": "topic"}
change = api.post("/changes/", json=changeinput)
id = change["id"]
# Get
api.get("/changes/" + id)
# Put with content as string
api.put("/changes/" + id + "/edit/foo", data="content")
# Put with no content
api.put("/changes/" + id + "/edit/foo")
# Review by API
rev = GerritReview()
rev.set_message("Review from live test")
rev.add_labels({"Code-Review": 1})
api.review(id, "current", rev)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add basic tests running against a live Gerrit server
Change-Id: I313a9132d2de7a87b3257af7846febb64b3873ee<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License
#
# Copyright 2018 David Pursehouse. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Live server tests."""
import unittest
from pygerrit2.rest import GerritRestAPI, GerritReview
from pygerrit2.rest.auth import HTTPBasicAuthFromNetrc
url = "http://localhost:8080"
auth = HTTPBasicAuthFromNetrc(url)
api = GerritRestAPI(url=url, auth=auth)
class TestLiveServer(unittest.TestCase):
"""Test that GerritRestAPI behaves properly against a live server."""
def test_live_server(self):
"""Run the tests."""
# Post with content as dict
changeinput = {"project": "test-project",
"subject": "subject",
"branch": "master",
"topic": "topic"}
change = api.post("/changes/", json=changeinput)
id = change["id"]
# Get
api.get("/changes/" + id)
# Put with content as string
api.put("/changes/" + id + "/edit/foo", data="content")
# Put with no content
api.put("/changes/" + id + "/edit/foo")
# Review by API
rev = GerritReview()
rev.set_message("Review from live test")
rev.add_labels({"Code-Review": 1})
api.review(id, "current", rev)
if __name__ == '__main__':
unittest.main()
|
Add basic tests running against a live Gerrit server
Change-Id: I313a9132d2de7a87b3257af7846febb64b3873ee#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License
#
# Copyright 2018 David Pursehouse. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Live server tests."""
import unittest
from pygerrit2.rest import GerritRestAPI, GerritReview
from pygerrit2.rest.auth import HTTPBasicAuthFromNetrc
url = "http://localhost:8080"
auth = HTTPBasicAuthFromNetrc(url)
api = GerritRestAPI(url=url, auth=auth)
class TestLiveServer(unittest.TestCase):
"""Test that GerritRestAPI behaves properly against a live server."""
def test_live_server(self):
"""Run the tests."""
# Post with content as dict
changeinput = {"project": "test-project",
"subject": "subject",
"branch": "master",
"topic": "topic"}
change = api.post("/changes/", json=changeinput)
id = change["id"]
# Get
api.get("/changes/" + id)
# Put with content as string
api.put("/changes/" + id + "/edit/foo", data="content")
# Put with no content
api.put("/changes/" + id + "/edit/foo")
# Review by API
rev = GerritReview()
rev.set_message("Review from live test")
rev.add_labels({"Code-Review": 1})
api.review(id, "current", rev)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add basic tests running against a live Gerrit server
Change-Id: I313a9132d2de7a87b3257af7846febb64b3873ee<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# The MIT License
#
# Copyright 2018 David Pursehouse. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Live server tests."""
import unittest
from pygerrit2.rest import GerritRestAPI, GerritReview
from pygerrit2.rest.auth import HTTPBasicAuthFromNetrc
url = "http://localhost:8080"
auth = HTTPBasicAuthFromNetrc(url)
api = GerritRestAPI(url=url, auth=auth)
class TestLiveServer(unittest.TestCase):
"""Test that GerritRestAPI behaves properly against a live server."""
def test_live_server(self):
"""Run the tests."""
# Post with content as dict
changeinput = {"project": "test-project",
"subject": "subject",
"branch": "master",
"topic": "topic"}
change = api.post("/changes/", json=changeinput)
id = change["id"]
# Get
api.get("/changes/" + id)
# Put with content as string
api.put("/changes/" + id + "/edit/foo", data="content")
# Put with no content
api.put("/changes/" + id + "/edit/foo")
# Review by API
rev = GerritReview()
rev.set_message("Review from live test")
rev.add_labels({"Code-Review": 1})
api.review(id, "current", rev)
if __name__ == '__main__':
unittest.main()
|
|
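The live test above authenticates with HTTPBasicAuthFromNetrc, so it only passes when ~/.netrc holds credentials for localhost. A minimal setup sketch; the admin/secret credentials are assumptions, and the explicit variant relies on GerritRestAPI accepting any requests-style auth object (the netrc helper itself is one):
# ~/.netrc entry the netrc-based lookup would pick up (hypothetical credentials):
#     machine localhost login admin password secret
# Explicit equivalent, bypassing .netrc:
from requests.auth import HTTPBasicAuth
from pygerrit2.rest import GerritRestAPI
api = GerritRestAPI(url="http://localhost:8080", auth=HTTPBasicAuth("admin", "secret"))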
cb8977ed160933701dd6588150ee0f00d14511b6
|
books/migrations/0002_auto_20151116_2351.py
|
books/migrations/0002_auto_20151116_2351.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('books', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='receipt',
name='created',
field=models.DateTimeField(auto_now=True, default=datetime.datetime(2015, 11, 16, 21, 51, 54, 805686, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='receipt',
name='modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 16, 21, 50, 54, 951814, tzinfo=utc)),
),
]
|
Add migration for Receipt created field
|
Add migration for Receipt created field
|
Python
|
mit
|
trimailov/finance,trimailov/finance,trimailov/finance
|
Add migration for Receipt created field
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('books', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='receipt',
name='created',
field=models.DateTimeField(auto_now=True, default=datetime.datetime(2015, 11, 16, 21, 51, 54, 805686, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='receipt',
name='modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 16, 21, 50, 54, 951814, tzinfo=utc)),
),
]
|
<commit_before><commit_msg>Add migration for Receipt created field<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('books', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='receipt',
name='created',
field=models.DateTimeField(auto_now=True, default=datetime.datetime(2015, 11, 16, 21, 51, 54, 805686, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='receipt',
name='modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 16, 21, 50, 54, 951814, tzinfo=utc)),
),
]
|
Add migration for Receipt created field# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('books', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='receipt',
name='created',
field=models.DateTimeField(auto_now=True, default=datetime.datetime(2015, 11, 16, 21, 51, 54, 805686, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='receipt',
name='modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 16, 21, 50, 54, 951814, tzinfo=utc)),
),
]
|
<commit_before><commit_msg>Add migration for Receipt created field<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('books', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='receipt',
name='created',
field=models.DateTimeField(auto_now=True, default=datetime.datetime(2015, 11, 16, 21, 51, 54, 805686, tzinfo=utc)),
preserve_default=False,
),
migrations.AddField(
model_name='receipt',
name='modified',
field=models.DateTimeField(default=datetime.datetime(2015, 11, 16, 21, 50, 54, 951814, tzinfo=utc)),
),
]
|
|
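Note that the migration above backfills created with auto_now=True, which re-stamps the field on every save rather than only at creation. A sketch of the conventional split these two timestamps usually follow; the surrounding model is not shown in the commit, so this is illustrative only:
from django.db import models

class Receipt(models.Model):
    created = models.DateTimeField(auto_now_add=True)  # set once, at insert time
    modified = models.DateTimeField(auto_now=True)     # refreshed on every save()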
d4f53913c2e3aee61fe59e42a66e0ac279b186a1
|
data-structures/queue/queue_tests.py
|
data-structures/queue/queue_tests.py
|
import unittest
from queue import Queue
class TestQueue(unittest.TestCase):
def test_enqueue_to_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.enqueue(1)
self.assertEqual(queue.size, 1, "Queue should contain one element")
def test_enqueue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
def test_dequeue_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
element = queue.dequeue()
self.assertIsNone(element)
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.dequeue()
queue.dequeue()
queue.dequeue()
self.assertEqual(queue.size, 0, "Queue should be empty")
def test_dequeue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
# Test FIFO (first in first out)
one = queue.dequeue()
self.assertEqual(one, 1, "Should be 1")
self.assertEqual(queue.size, 5 - 1, "Queue size should decrease")
two = queue.dequeue()
self.assertEqual(two, 2, "Should be 2")
self.assertEqual(queue.size, 5 - 2, "Queue size should decrease")
three = queue.dequeue()
self.assertEqual(three, 3, "Should be 3")
self.assertEqual(queue.size, 5 - 3, "Queue size should decrease")
four = queue.dequeue()
self.assertEqual(four, 4, "Should be 4")
self.assertEqual(queue.size, 5 - 4, "Queue size should decrease")
five = queue.dequeue()
self.assertEqual(five, 5, "Should be 5")
self.assertEqual(queue.size, 5 - 5, "Queue size should decrease")
none = queue.dequeue()
self.assertIsNone(none, "Queue should be empty")
self.assertEqual(queue.size, 0, "Queue should be empty")
if __name__ == '__main__':
unittest.main()
|
Add tests for queue implementation
|
Add tests for queue implementation
|
Python
|
mit
|
julianespinel/trainning,julianespinel/training,julianespinel/training,julianespinel/training,julianespinel/training,julianespinel/trainning
|
Add tests for queue implementation
|
import unittest
from queue import Queue
class TestQueue(unittest.TestCase):
def test_enqueue_to_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.enqueue(1)
self.assertEqual(queue.size, 1, "Queue should contain one element")
def test_enqueue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
def test_dequeue_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
element = queue.dequeue()
self.assertIsNone(element)
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.dequeue()
queue.dequeue()
queue.dequeue()
self.assertEqual(queue.size, 0, "Queue should be empty")
def test_dequeue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
# Test FIFO (first in first out)
one = queue.dequeue()
self.assertEqual(one, 1, "Should be 1")
self.assertEqual(queue.size, 5 - 1, "Queue size should decrease")
two = queue.dequeue()
self.assertEqual(two, 2, "Should be 2")
self.assertEqual(queue.size, 5 - 2, "Queue size should decrease")
three = queue.dequeue()
self.assertEqual(three, 3, "Should be 3")
self.assertEqual(queue.size, 5 - 3, "Queue size should decrease")
four = queue.dequeue()
self.assertEqual(four, 4, "Should be 4")
self.assertEqual(queue.size, 5 - 4, "Queue size should decrease")
five = queue.dequeue()
self.assertEqual(five, 5, "Should be 5")
self.assertEqual(queue.size, 5 - 5, "Queue size should decrease")
none = queue.dequeue()
self.assertIsNone(none, "Queue should be empty")
self.assertEqual(queue.size, 0, "Queue should be empty")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for queue implementation<commit_after>
|
import unittest
from queue import Queue
class TestQueue(unittest.TestCase):
def test_enqueue_to_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.enqueue(1)
self.assertEqual(queue.size, 1, "Queue should contain one element")
def test_enqueue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
def test_dequeue_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
element = queue.dequeue()
self.assertIsNone(element)
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.dequeue()
queue.dequeue()
queue.dequeue()
self.assertEqual(queue.size, 0, "Queue should be empty")
def test_dequeue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
# Test FIFO (first in first out)
one = queue.dequeue()
self.assertEqual(one, 1, "Should be 1")
self.assertEqual(queue.size, 5 - 1, "Queue size should decrease")
two = queue.dequeue()
self.assertEqual(two, 2, "Should be 2")
self.assertEqual(queue.size, 5 - 2, "Queue size should decrease")
three = queue.dequeue()
self.assertEqual(three, 3, "Should be 3")
self.assertEqual(queue.size, 5 - 3, "Queue size should decrease")
four = queue.dequeue()
self.assertEqual(four, 4, "Should be 4")
self.assertEqual(queue.size, 5 - 4, "Queue size should decrease")
five = queue.dequeue()
self.assertEqual(five, 5, "Should be 5")
self.assertEqual(queue.size, 5 - 5, "Queue size should decrease")
none = queue.dequeue()
self.assertIsNone(none, "Queue should be empty")
self.assertEqual(queue.size, 0, "Queue should be empty")
if __name__ == '__main__':
unittest.main()
|
Add tests for queue implementationimport unittest
from queue import Queue
class TestQueue(unittest.TestCase):
def test_enqueue_to_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.enqueue(1)
self.assertEqual(queue.size, 1, "Queue should contain one element")
def test_enqueue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
def test_dequeue_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
element = queue.dequeue()
self.assertIsNone(element)
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.dequeue()
queue.dequeue()
queue.dequeue()
self.assertEqual(queue.size, 0, "Queue should be empty")
def test_dequeue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
# Test FIFO (first in first out)
one = queue.dequeue()
self.assertEqual(one, 1, "Should be 1")
self.assertEqual(queue.size, 5 - 1, "Queue size should decrease")
two = queue.dequeue()
self.assertEqual(two, 2, "Should be 2")
self.assertEqual(queue.size, 5 - 2, "Queue size should decrease")
three = queue.dequeue()
self.assertEqual(three, 3, "Should be 3")
self.assertEqual(queue.size, 5 - 3, "Queue size should decrease")
four = queue.dequeue()
self.assertEqual(four, 4, "Should be 4")
self.assertEqual(queue.size, 5 - 4, "Queue size should decrease")
five = queue.dequeue()
self.assertEqual(five, 5, "Should be 5")
self.assertEqual(queue.size, 5 - 5, "Queue size should decrease")
none = queue.dequeue()
self.assertIsNone(none, "Queue should be empty")
self.assertEqual(queue.size, 0, "Queue should be empty")
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for queue implementation<commit_after>import unittest
from queue import Queue
class TestQueue(unittest.TestCase):
def test_enqueue_to_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.enqueue(1)
self.assertEqual(queue.size, 1, "Queue should contain one element")
def test_enqueue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
def test_dequeue_empty_queue(self):
queue = Queue()
self.assertEqual(queue.size, 0, "Queue should be empty")
element = queue.dequeue()
self.assertIsNone(element)
self.assertEqual(queue.size, 0, "Queue should be empty")
queue.dequeue()
queue.dequeue()
queue.dequeue()
self.assertEqual(queue.size, 0, "Queue should be empty")
def test_dequeue_non_empty_queue(self):
queue = Queue()
queue.enqueue(1)
queue.enqueue(2)
queue.enqueue(3)
queue.enqueue(4)
queue.enqueue(5)
self.assertEqual(queue.size, 5, "Queue should contain one element")
# Test FIFO (first in first out)
one = queue.dequeue()
self.assertEqual(one, 1, "Should be 1")
self.assertEqual(queue.size, 5 - 1, "Queue size should decrease")
two = queue.dequeue()
self.assertEqual(two, 2, "Should be 2")
self.assertEqual(queue.size, 5 - 2, "Queue size should decrease")
three = queue.dequeue()
self.assertEqual(three, 3, "Should be 3")
self.assertEqual(queue.size, 5 - 3, "Queue size should decrease")
four = queue.dequeue()
self.assertEqual(four, 4, "Should be 4")
self.assertEqual(queue.size, 5 - 4, "Queue size should decrease")
five = queue.dequeue()
self.assertEqual(five, 5, "Should be 5")
self.assertEqual(queue.size, 5 - 5, "Queue size should decrease")
none = queue.dequeue()
self.assertIsNone(none, "Queue should be empty")
self.assertEqual(queue.size, 0, "Queue should be empty")
if __name__ == '__main__':
unittest.main()
|
|
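The suite above pins down the contract of queue.py without showing it: a size attribute, enqueue, and a dequeue that returns None on an empty queue instead of raising. One minimal implementation that satisfies every assertion, offered as a sketch rather than the repository's actual queue.py:
from collections import deque

class Queue:
    def __init__(self):
        self._items = deque()  # deque gives O(1) append and popleft

    @property
    def size(self):
        return len(self._items)

    def enqueue(self, element):
        self._items.append(element)

    def dequeue(self):
        # The tests expect None, not an exception, when the queue is empty
        return self._items.popleft() if self._items else None
Note that a local module named queue shadows the standard-library queue; a rename would avoid import surprises.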
5147674980d3d81314bbe3e5ddbad4c01e9ffc21
|
makepanda/test_wheel.py
|
makepanda/test_wheel.py
|
#!/usr/bin/env python
"""
Tests a .whl file by installing it and pytest into a virtual environment and
running the test suite.
Requires pip to be installed, as well as 'virtualenv' on Python 2.
"""
import os
import sys
import shutil
import subprocess
import tempfile
from optparse import OptionParser
def test_wheel(wheel, verbose=False):
envdir = tempfile.mkdtemp(prefix="venv-")
print("Setting up virtual environment in {0}".format(envdir))
if sys.version_info >= (3, 0):
subprocess.call([sys.executable, "-m", "venv", "--clear", envdir])
else:
subprocess.call([sys.executable, "-m", "virtualenv", "--clear", envdir])
# Install pytest into the environment, as well as our wheel.
if sys.platform == "win32":
pip = os.path.join(envdir, "Scripts", "pip.exe")
else:
pip = os.path.join(envdir, "bin", "pip")
if subprocess.call([pip, "install", "pytest", wheel]) != 0:
shutil.rmtree(envdir)
sys.exit(1)
# Run the test suite.
if sys.platform == "win32":
python = os.path.join(envdir, "Scripts", "python.exe")
else:
python = os.path.join(envdir, "bin", "python")
test_cmd = [python, "-m", "pytest", "tests"]
if verbose:
test_cmd.append("--verbose")
exit_code = subprocess.call(test_cmd)
shutil.rmtree(envdir)
if exit_code != 0:
sys.exit(exit_code)
if __name__ == "__main__":
parser = OptionParser(usage="%prog [options] file...")
    parser.add_option('', '--verbose', dest='verbose', help='Enable verbose output', action='store_true', default=False)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
sys.exit(1)
for arg in args:
test_wheel(arg, verbose=options.verbose)
|
Add script to run test suite on a wheel in a virtualenv
|
Add script to run test suite on a wheel in a virtualenv
[skip ci]
|
Python
|
bsd-3-clause
|
chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d,chandler14362/panda3d
|
Add script to run test suite on a wheel in a virtualenv
[skip ci]
|
#!/usr/bin/env python
"""
Tests a .whl file by installing it and pytest into a virtual environment and
running the test suite.
Requires pip to be installed, as well as 'virtualenv' on Python 2.
"""
import os
import sys
import shutil
import subprocess
import tempfile
from optparse import OptionParser
def test_wheel(wheel, verbose=False):
envdir = tempfile.mkdtemp(prefix="venv-")
print("Setting up virtual environment in {0}".format(envdir))
if sys.version_info >= (3, 0):
subprocess.call([sys.executable, "-m", "venv", "--clear", envdir])
else:
subprocess.call([sys.executable, "-m", "virtualenv", "--clear", envdir])
# Install pytest into the environment, as well as our wheel.
if sys.platform == "win32":
pip = os.path.join(envdir, "Scripts", "pip.exe")
else:
pip = os.path.join(envdir, "bin", "pip")
if subprocess.call([pip, "install", "pytest", wheel]) != 0:
shutil.rmtree(envdir)
sys.exit(1)
# Run the test suite.
if sys.platform == "win32":
python = os.path.join(envdir, "Scripts", "python.exe")
else:
python = os.path.join(envdir, "bin", "python")
test_cmd = [python, "-m", "pytest", "tests"]
if verbose:
test_cmd.append("--verbose")
exit_code = subprocess.call(test_cmd)
shutil.rmtree(envdir)
if exit_code != 0:
sys.exit(exit_code)
if __name__ == "__main__":
parser = OptionParser(usage="%prog [options] file...")
    parser.add_option('', '--verbose', dest='verbose', help='Enable verbose output', action='store_true', default=False)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
sys.exit(1)
for arg in args:
test_wheel(arg, verbose=options.verbose)
|
<commit_before><commit_msg>Add script to run test suite on a wheel in a virtualenv
[skip ci]<commit_after>
|
#!/usr/bin/env python
"""
Tests a .whl file by installing it and pytest into a virtual environment and
running the test suite.
Requires pip to be installed, as well as 'virtualenv' on Python 2.
"""
import os
import sys
import shutil
import subprocess
import tempfile
from optparse import OptionParser
def test_wheel(wheel, verbose=False):
envdir = tempfile.mkdtemp(prefix="venv-")
print("Setting up virtual environment in {0}".format(envdir))
if sys.version_info >= (3, 0):
subprocess.call([sys.executable, "-m", "venv", "--clear", envdir])
else:
subprocess.call([sys.executable, "-m", "virtualenv", "--clear", envdir])
# Install pytest into the environment, as well as our wheel.
if sys.platform == "win32":
pip = os.path.join(envdir, "Scripts", "pip.exe")
else:
pip = os.path.join(envdir, "bin", "pip")
if subprocess.call([pip, "install", "pytest", wheel]) != 0:
shutil.rmtree(envdir)
sys.exit(1)
# Run the test suite.
if sys.platform == "win32":
python = os.path.join(envdir, "Scripts", "python.exe")
else:
python = os.path.join(envdir, "bin", "python")
test_cmd = [python, "-m", "pytest", "tests"]
if verbose:
test_cmd.append("--verbose")
exit_code = subprocess.call(test_cmd)
shutil.rmtree(envdir)
if exit_code != 0:
sys.exit(exit_code)
if __name__ == "__main__":
parser = OptionParser(usage="%prog [options] file...")
    parser.add_option('', '--verbose', dest='verbose', help='Enable verbose output', action='store_true', default=False)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
sys.exit(1)
for arg in args:
test_wheel(arg, verbose=options.verbose)
|
Add script to run test suite on a wheel in a virtualenv
[skip ci]#!/usr/bin/env python
"""
Tests a .whl file by installing it and pytest into a virtual environment and
running the test suite.
Requires pip to be installed, as well as 'virtualenv' on Python 2.
"""
import os
import sys
import shutil
import subprocess
import tempfile
from optparse import OptionParser
def test_wheel(wheel, verbose=False):
envdir = tempfile.mkdtemp(prefix="venv-")
print("Setting up virtual environment in {0}".format(envdir))
if sys.version_info >= (3, 0):
subprocess.call([sys.executable, "-m", "venv", "--clear", envdir])
else:
subprocess.call([sys.executable, "-m", "virtualenv", "--clear", envdir])
# Install pytest into the environment, as well as our wheel.
if sys.platform == "win32":
pip = os.path.join(envdir, "Scripts", "pip.exe")
else:
pip = os.path.join(envdir, "bin", "pip")
if subprocess.call([pip, "install", "pytest", wheel]) != 0:
shutil.rmtree(envdir)
sys.exit(1)
# Run the test suite.
if sys.platform == "win32":
python = os.path.join(envdir, "Scripts", "python.exe")
else:
python = os.path.join(envdir, "bin", "python")
test_cmd = [python, "-m", "pytest", "tests"]
if verbose:
test_cmd.append("--verbose")
exit_code = subprocess.call(test_cmd)
shutil.rmtree(envdir)
if exit_code != 0:
sys.exit(exit_code)
if __name__ == "__main__":
parser = OptionParser(usage="%prog [options] file...")
    parser.add_option('', '--verbose', dest='verbose', help='Enable verbose output', action='store_true', default=False)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
sys.exit(1)
for arg in args:
test_wheel(arg, verbose=options.verbose)
|
<commit_before><commit_msg>Add script to run test suite on a wheel in a virtualenv
[skip ci]<commit_after>#!/usr/bin/env python
"""
Tests a .whl file by installing it and pytest into a virtual environment and
running the test suite.
Requires pip to be installed, as well as 'virtualenv' on Python 2.
"""
import os
import sys
import shutil
import subprocess
import tempfile
from optparse import OptionParser
def test_wheel(wheel, verbose=False):
envdir = tempfile.mkdtemp(prefix="venv-")
print("Setting up virtual environment in {0}".format(envdir))
if sys.version_info >= (3, 0):
subprocess.call([sys.executable, "-m", "venv", "--clear", envdir])
else:
subprocess.call([sys.executable, "-m", "virtualenv", "--clear", envdir])
# Install pytest into the environment, as well as our wheel.
if sys.platform == "win32":
pip = os.path.join(envdir, "Scripts", "pip.exe")
else:
pip = os.path.join(envdir, "bin", "pip")
if subprocess.call([pip, "install", "pytest", wheel]) != 0:
shutil.rmtree(envdir)
sys.exit(1)
# Run the test suite.
if sys.platform == "win32":
python = os.path.join(envdir, "Scripts", "python.exe")
else:
python = os.path.join(envdir, "bin", "python")
test_cmd = [python, "-m", "pytest", "tests"]
if verbose:
test_cmd.append("--verbose")
exit_code = subprocess.call(test_cmd)
shutil.rmtree(envdir)
if exit_code != 0:
sys.exit(exit_code)
if __name__ == "__main__":
parser = OptionParser(usage="%prog [options] file...")
    parser.add_option('', '--verbose', dest='verbose', help='Enable verbose output', action='store_true', default=False)
(options, args) = parser.parse_args()
if not args:
parser.print_usage()
sys.exit(1)
for arg in args:
test_wheel(arg, verbose=options.verbose)
|
|
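A typical invocation of the script above (the wheel filename is hypothetical):
python makepanda/test_wheel.py --verbose panda3d-1.10.0-cp36-cp36m-manylinux1_x86_64.whl
On Python 2 the virtualenv package must be installed beforehand, since the script falls back to python -m virtualenv there; on Python 3 the bundled venv module is used.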
4e2e9ba9336d2e60084cda9623e7573785f4f0e0
|
computing_frequencies_with_mismatches.py
|
computing_frequencies_with_mismatches.py
|
from neighbors import neighbors
from pattern_to_number import pattern_to_number
def computing_frequencies_with_mismatches(text, k, d):
frequency_array = [0] * 4**k
    for i in range(0, len(text) - k + 1):
        pattern = text[i:i + k]
neighborhood = neighbors(pattern, d)
for approximate_pattern in neighborhood:
j = pattern_to_number(approximate_pattern)
frequency_array[j] = frequency_array[j] + 1
return frequency_array
if __name__ == "__main__":
text = raw_input("Text: ")
k, d = map(int, raw_input("K / D: ").split(" "))
print computing_frequencies_with_mismatches(text, k, d)
|
Add computing frequencies with mismatches
|
Add computing frequencies with mismatches
|
Python
|
mit
|
dennis95stumm/bioinformatics_algorithms,dennis95stumm/bioinformatics_algorithms
|
Add computing frequencies with mismatches
|
from neighbors import neighbors
from pattern_to_number import pattern_to_number
def computing_frequencies_with_mismatches(text, k, d):
frequency_array = [0] * 4**k
    for i in range(0, len(text) - k + 1):
        pattern = text[i:i + k]
neighborhood = neighbors(pattern, d)
for approximate_pattern in neighborhood:
j = pattern_to_number(approximate_pattern)
frequency_array[j] = frequency_array[j] + 1
return frequency_array
if __name__ == "__main__":
text = raw_input("Text: ")
k, d = map(int, raw_input("K / D: ").split(" "))
print computing_frequencies_with_mismatches(text, k, d)
|
<commit_before><commit_msg>Add computing frequencies with mismatches<commit_after>
|
from neighbors import neighbors
from pattern_to_number import pattern_to_number
def computing_frequencies_with_mismatches(text, k, d):
frequency_array = [0] * 4**k
    for i in range(0, len(text) - k + 1):
        pattern = text[i:i + k]
neighborhood = neighbors(pattern, d)
for approximate_pattern in neighborhood:
j = pattern_to_number(approximate_pattern)
frequency_array[j] = frequency_array[j] + 1
return frequency_array
if __name__ == "__main__":
text = raw_input("Text: ")
k, d = map(int, raw_input("K / D: ").split(" "))
print computing_frequencies_with_mismatches(text, k, d)
|
Add computing frequencies with mismatchesfrom neighbors import neighbors
from pattern_to_number import pattern_to_number
def computing_frequencies_with_mismatches(text, k, d):
frequency_array = [0] * 4**k
    for i in range(0, len(text) - k + 1):
        pattern = text[i:i + k]
neighborhood = neighbors(pattern, d)
for approximate_pattern in neighborhood:
j = pattern_to_number(approximate_pattern)
frequency_array[j] = frequency_array[j] + 1
return frequency_array
if __name__ == "__main__":
text = raw_input("Text: ")
k, d = map(int, raw_input("K / D: ").split(" "))
print computing_frequencies_with_mismatches(text, k, d)
|
<commit_before><commit_msg>Add computing frequencies with mismatches<commit_after>from neighbors import neighbors
from pattern_to_number import pattern_to_number
def computing_frequencies_with_mismatches(text, k, d):
frequency_array = [0] * 4**k
    for i in range(0, len(text) - k + 1):
        pattern = text[i:i + k]
neighborhood = neighbors(pattern, d)
for approximate_pattern in neighborhood:
j = pattern_to_number(approximate_pattern)
frequency_array[j] = frequency_array[j] + 1
return frequency_array
if __name__ == "__main__":
text = raw_input("Text: ")
k, d = map(int, raw_input("K / D: ").split(" "))
print computing_frequencies_with_mismatches(text, k, d)
|
|
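A tiny smoke test for the function above (with the window slice corrected to text[i:i + k]); it assumes neighbors(pattern, 0) returns only the pattern itself, so d = 0 degenerates to plain k-mer counting:
freqs = computing_frequencies_with_mismatches("ACGT", 2, 0)
assert sum(freqs) == 3  # a length-4 text has exactly three 2-mer windows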
3f6fe2658fef0f0a27f58e67da7cc64c2fba7371
|
chnnlsdmo/chnnlsdmo/admin.py
|
chnnlsdmo/chnnlsdmo/admin.py
|
from django.contrib import admin
from .models import Voter, Flag, Vote
admin.site.register(Voter)
admin.site.register(Flag)
admin.site.register(Vote)
|
Make all models visible in Admin
|
Make all models visible in Admin
|
Python
|
bsd-3-clause
|
shearichard/django-channels-demo,shearichard/django-channels-demo,shearichard/django-channels-demo
|
Make all models visible in Admin
|
from django.contrib import admin
from .models import Voter, Flag, Vote
admin.site.register(Voter)
admin.site.register(Flag)
admin.site.register(Vote)
|
<commit_before><commit_msg>Make all models visible in Admin<commit_after>
|
from django.contrib import admin
from .models import Voter, Flag, Vote
admin.site.register(Voter)
admin.site.register(Flag)
admin.site.register(Vote)
|
Make all models visible in Adminfrom django.contrib import admin
from .models import Voter, Flag, Vote
admin.site.register(Voter)
admin.site.register(Flag)
admin.site.register(Vote)
|
<commit_before><commit_msg>Make all models visible in Admin<commit_after>from django.contrib import admin
from .models import Voter, Flag, Vote
admin.site.register(Voter)
admin.site.register(Flag)
admin.site.register(Vote)
|
|
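Plain admin.site.register calls give default change lists; if columns or filters are wanted later, a registration can be replaced by a ModelAdmin. The field names below are assumptions, since the models themselves are not shown:
from django.contrib import admin
from .models import Vote

@admin.register(Vote)
class VoteAdmin(admin.ModelAdmin):
    list_display = ('id', 'voter', 'flag')  # hypothetical field names
    list_filter = ('flag',)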
d549285e2269f0078fefdb054d542c2cbb26dc89
|
integrated_tests/test_interactions.py
|
integrated_tests/test_interactions.py
|
from unittest import TestCase
import pygtop
class InteractionPropertyTests(TestCase):
def test_can_get_gtop_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.gtop_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_external_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_external_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_all_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_pdbs()
self.assertIn("4IAQ", pdbs)
|
Add integrated tests for interaction PDBs
|
Add integrated tests for interaction PDBs
|
Python
|
mit
|
samirelanduk/pygtop
|
Add integrated tests for interaction PDBs
|
from unittest import TestCase
import pygtop
class InteractionPropertyTests(TestCase):
def test_can_get_gtop_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.gtop_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_external_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_external_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_all_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_pdbs()
self.assertIn("4IAQ", pdbs)
|
<commit_before><commit_msg>Add integrated tests for interaction PDBs<commit_after>
|
from unittest import TestCase
import pygtop
class InteractionPropertyTests(TestCase):
def test_can_get_gtop_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.gtop_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_external_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_external_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_all_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_pdbs()
self.assertIn("4IAQ", pdbs)
|
Add integrated tests for interaction PDBsfrom unittest import TestCase
import pygtop
class InteractionPropertyTests(TestCase):
def test_can_get_gtop_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.gtop_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_external_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_external_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_all_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_pdbs()
self.assertIn("4IAQ", pdbs)
|
<commit_before><commit_msg>Add integrated tests for interaction PDBs<commit_after>from unittest import TestCase
import pygtop
class InteractionPropertyTests(TestCase):
def test_can_get_gtop_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.gtop_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_external_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_external_pdbs()
self.assertIn("4IAQ", pdbs)
def test_can_get_all_pdbs(self):
target = pygtop.get_target_by_id(2)
interaction = target.get_interaction_by_id(143)
pdbs = interaction.all_pdbs()
self.assertIn("4IAQ", pdbs)
|
|
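Each test above repeats the same two live API calls. Hoisting them into setUpClass would hit the network once per class instead of once per test; a sketch:
from unittest import TestCase
import pygtop

class InteractionPropertyTests(TestCase):
    @classmethod
    def setUpClass(cls):
        target = pygtop.get_target_by_id(2)
        cls.interaction = target.get_interaction_by_id(143)

    def test_can_get_gtop_pdbs(self):
        self.assertIn("4IAQ", self.interaction.gtop_pdbs())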
ca1a8434fd16100509c6ab0c197e562ff767fa06
|
basics/read_input.py
|
basics/read_input.py
|
foo = input("Try to type eval(2+2), if the result is '4' this is unsafe: ")
print("You typed the following: \"",foo,"\".")
input("Press [Enter] to exit")
# Result: seems to be safe with Python 3.1
# Found original tip here: http://www.daniweb.com/forums/thread12326.html
# It's from back in 2004, might have been unsafe then
|
Check to see if input is executed
|
Check to see if input is executed
|
Python
|
mit
|
RobertGresdal/python
|
Check to see if input is executed
|
foo = input("Try to type eval(2+2), if the result is '4' this is unsafe: ")
print("You typed the following: \"",foo,"\".")
input("Press [Enter] to exit")
# Result: seems to be safe with Python 3.1
# Found original tip here: http://www.daniweb.com/forums/thread12326.html
# It's from back in 2004, might have been unsafe then
|
<commit_before><commit_msg>Check to see if input is executed<commit_after>
|
foo = input("Try to type eval(2+2), if the result is '4' this is unsafe: ")
print("You typed the following: \"",foo,"\".")
input("Press [Enter] to exit")
# Result: seems to be safe with Python 3.1
# Found original tip here: http://www.daniweb.com/forums/thread12326.html
# It's from back in 2004, might have been unsafe then
|
Check to see if input is executedfoo = input("Try to type eval(2+2), if the result is '4' this is unsafe: ")
print("You typed the following: \"",foo,"\".")
input("Press [Enter] to exit")
# Result: seems to be safe with Python 3.1
# Found original tip here: http://www.daniweb.com/forums/thread12326.html
# It's from back in 2004, might have been unsafe then
|
<commit_before><commit_msg>Check to see if input is executed<commit_after>foo = input("Try to type eval(2+2), if the result is '4' this is unsafe: ")
print("You typed the following: \"",foo,"\".")
input("Press [Enter] to exit")
# Result: seems to be safe with Python 3.1
# Found original tip here: http://www.daniweb.com/forums/thread12326.html
# It's from back in 2004, might have been unsafe then
|
|
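What the snippet above is probing: Python 2's input() was defined as eval(raw_input()), so typing 2+2 came back as the integer 4, whereas Python 3's input() always returns a string. A sketch of the difference:
import sys

user_text = "2+2"              # what the user typed at the prompt
if sys.version_info[0] < 3:
    value = eval(user_text)    # Python 2 input() did this implicitly, yielding 4
else:
    value = user_text          # Python 3 input() leaves it as the string "2+2"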
76bf2bd2a3a570b8ab7632c709deb6fcd9bc20a4
|
custom/enikshay/management/commands/resolve_duplicate_persons.py
|
custom/enikshay/management/commands/resolve_duplicate_persons.py
|
from __future__ import absolute_import
from __future__ import print_function
from collections import defaultdict
import csv
import datetime
from django.core.management.base import BaseCommand
from dimagi.utils.decorators.memoized import memoized
from corehq.apps.locations.models import SQLLocation
from corehq.apps.hqcase.utils import bulk_update_cases
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import CASE_TYPE_PERSON
from custom.enikshay.duplicate_ids import get_cases_with_duplicate_ids
class Command(BaseCommand):
help = """
Finds cases with duplicate IDs and marks all but one of each ID as a duplicate
"""
# TODO what are the headers we need?
logfile_fields = ['name', 'dto_name', 'phi_name', 'owner_id', 'dob', 'phone_number']
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument(
'--commit',
action='store_true',
dest='commit',
default=False,
)
def handle(self, domain, **options):
self.domain = domain
commit = options['commit']
filename = '{}-{}.csv'.format(self.__module__.split('.')[-1],
datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
print("Logging actions to {}".format(filename))
with open(filename, 'w') as f:
logfile = csv.DictWriter(f, self.logfile_fields, extrasaction='ignore')
logfile.writeheader()
print("Finding duplicates")
bad_cases = get_cases_with_duplicate_ids(self.domain, CASE_TYPE_PERSON)
print("Processing duplicate cases")
for person_case in with_progress_bar(bad_cases):
updates = self.get_updates(person_case)
case_info = self.get_person_case_info(person_case)
for update in updates:
                    logfile.writerow(dict(case_info, **update))
                if commit:
                    bulk_update_cases(self.domain, updates, self.__module__)
@staticmethod
def get_updates(person_case):
pass
@property
@memoized
def districts_by_id(self):
locs = SQLLocation.objects.filter(domain=self.domain, location_type__code='dto')
return defaultdict(lambda: '', (
(loc.location_id, loc.loc_name) for loc in locs
))
def get_person_case_info(self, person_case):
"""Pull info that we want to log but not update"""
person = person_case.dynamic_case_properties()
return {
'name': ' '.join(filter(None, [person.get('first_name'), person.get('last_name')])),
            'dto_name': self.districts_by_id[person.get('current_address_district_choice')],
'phi_name': person.get('phi'),
'owner_id': person_case.owner_id,
'dob': person.get('dob'),
'phone_number': person.get('contact_phone_number'),
}
|
Add basic duplicate resolution cmd with log info
|
Add basic duplicate resolution cmd with log info
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
Add basic duplicate resolution cmd with log info
|
from __future__ import absolute_import
from __future__ import print_function
from collections import defaultdict
import csv
import datetime
from django.core.management.base import BaseCommand
from dimagi.utils.decorators.memoized import memoized
from corehq.apps.locations.models import SQLLocation
from corehq.apps.hqcase.utils import bulk_update_cases
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import CASE_TYPE_PERSON
from custom.enikshay.duplicate_ids import get_cases_with_duplicate_ids
class Command(BaseCommand):
help = """
Finds cases with duplicate IDs and marks all but one of each ID as a duplicate
"""
# TODO what are the headers we need?
logfile_fields = ['name', 'dto_name', 'phi_name', 'owner_id', 'dob', 'phone_number']
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument(
'--commit',
action='store_true',
dest='commit',
default=False,
)
def handle(self, domain, **options):
self.domain = domain
commit = options['commit']
filename = '{}-{}.csv'.format(self.__module__.split('.')[-1],
datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
print("Logging actions to {}".format(filename))
with open(filename, 'w') as f:
logfile = csv.DictWriter(f, self.logfile_fields, extrasaction='ignore')
logfile.writeheader()
print("Finding duplicates")
bad_cases = get_cases_with_duplicate_ids(self.domain, CASE_TYPE_PERSON)
print("Processing duplicate cases")
for person_case in with_progress_bar(bad_cases):
updates = self.get_updates(person_case)
case_info = self.get_person_case_info(person_case)
for update in updates:
                    logfile.writerow(dict(case_info, **update))
                if commit:
                    bulk_update_cases(self.domain, updates, self.__module__)
@staticmethod
def get_updates(person_case):
pass
@property
@memoized
def districts_by_id(self):
locs = SQLLocation.objects.filter(domain=self.domain, location_type__code='dto')
return defaultdict(lambda: '', (
(loc.location_id, loc.loc_name) for loc in locs
))
def get_person_case_info(self, person_case):
"""Pull info that we want to log but not update"""
person = person_case.dynamic_case_properties()
return {
'name': ' '.join(filter(None, [person.get('first_name'), person.get('last_name')])),
            'dto_name': self.districts_by_id[person.get('current_address_district_choice')],
'phi_name': person.get('phi'),
'owner_id': person_case.owner_id,
'dob': person.get('dob'),
'phone_number': person.get('contact_phone_number'),
}
|
<commit_before><commit_msg>Add basic duplicate resolution cmd with log info<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from collections import defaultdict
import csv
import datetime
from django.core.management.base import BaseCommand
from dimagi.utils.decorators.memoized import memoized
from corehq.apps.locations.models import SQLLocation
from corehq.apps.hqcase.utils import bulk_update_cases
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import CASE_TYPE_PERSON
from custom.enikshay.duplicate_ids import get_cases_with_duplicate_ids
class Command(BaseCommand):
help = """
Finds cases with duplicate IDs and marks all but one of each ID as a duplicate
"""
# TODO what are the headers we need?
logfile_fields = ['name', 'dto_name', 'phi_name', 'owner_id', 'dob', 'phone_number']
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument(
'--commit',
action='store_true',
dest='commit',
default=False,
)
def handle(self, domain, **options):
self.domain = domain
commit = options['commit']
filename = '{}-{}.csv'.format(self.__module__.split('.')[-1],
datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
print("Logging actions to {}".format(filename))
with open(filename, 'w') as f:
logfile = csv.DictWriter(f, self.logfile_fields, extrasaction='ignore')
logfile.writeheader()
print("Finding duplicates")
bad_cases = get_cases_with_duplicate_ids(self.domain, CASE_TYPE_PERSON)
print("Processing duplicate cases")
for person_case in with_progress_bar(bad_cases):
updates = self.get_updates(person_case)
case_info = self.get_person_case_info(person_case)
for update in updates:
                    logfile.writerow(dict(case_info, **update))
                if commit:
                    bulk_update_cases(self.domain, updates, self.__module__)
@staticmethod
def get_updates(person_case):
pass
@property
@memoized
def districts_by_id(self):
locs = SQLLocation.objects.filter(domain=self.domain, location_type__code='dto')
return defaultdict(lambda: '', (
(loc.location_id, loc.loc_name) for loc in locs
))
def get_person_case_info(self, person_case):
"""Pull info that we want to log but not update"""
person = person_case.dynamic_case_properties()
return {
'name': ' '.join(filter(None, [person.get('first_name'), person.get('last_name')])),
            'dto_name': self.districts_by_id[person.get('current_address_district_choice')],
'phi_name': person.get('phi'),
'owner_id': person_case.owner_id,
'dob': person.get('dob'),
'phone_number': person.get('contact_phone_number'),
}
|
Add basic duplicate resolution cmd with log infofrom __future__ import absolute_import
from __future__ import print_function
from collections import defaultdict
import csv
import datetime
from django.core.management.base import BaseCommand
from dimagi.utils.decorators.memoized import memoized
from corehq.apps.locations.models import SQLLocation
from corehq.apps.hqcase.utils import bulk_update_cases
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import CASE_TYPE_PERSON
from custom.enikshay.duplicate_ids import get_cases_with_duplicate_ids
class Command(BaseCommand):
help = """
Finds cases with duplicate IDs and marks all but one of each ID as a duplicate
"""
# TODO what are the headers we need?
logfile_fields = ['name', 'dto_name', 'phi_name', 'owner_id', 'dob', 'phone_number']
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument(
'--commit',
action='store_true',
dest='commit',
default=False,
)
def handle(self, domain, **options):
self.domain = domain
commit = options['commit']
filename = '{}-{}.csv'.format(self.__module__.split('.')[-1],
datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
print("Logging actions to {}".format(filename))
with open(filename, 'w') as f:
logfile = csv.DictWriter(f, self.logfile_fields, extrasaction='ignore')
logfile.writeheader()
print("Finding duplicates")
bad_cases = get_cases_with_duplicate_ids(self.domain, CASE_TYPE_PERSON)
print("Processing duplicate cases")
for person_case in with_progress_bar(bad_cases):
updates = self.get_updates(person_case)
case_info = self.get_person_case_info(person_case)
for update in updates:
                    logfile.writerow(dict(case_info, **update))
                if commit:
                    bulk_update_cases(self.domain, updates, self.__module__)
@staticmethod
def get_updates(person_case):
pass
@property
@memoized
def districts_by_id(self):
locs = SQLLocation.objects.filter(domain=self.domain, location_type__code='dto')
return defaultdict(lambda: '', (
(loc.location_id, loc.loc_name) for loc in locs
))
def get_person_case_info(self, person_case):
"""Pull info that we want to log but not update"""
person = person_case.dynamic_case_properties()
return {
'name': ' '.join(filter(None, [person.get('first_name'), person.get('last_name')])),
            'dto_name': self.districts_by_id[person.get('current_address_district_choice')],
'phi_name': person.get('phi'),
'owner_id': person_case.owner_id,
'dob': person.get('dob'),
'phone_number': person.get('contact_phone_number'),
}
|
<commit_before><commit_msg>Add basic duplicate resolution cmd with log info<commit_after>from __future__ import absolute_import
from __future__ import print_function
from collections import defaultdict
import csv
import datetime
from django.core.management.base import BaseCommand
from dimagi.utils.decorators.memoized import memoized
from corehq.apps.locations.models import SQLLocation
from corehq.apps.hqcase.utils import bulk_update_cases
from corehq.util.log import with_progress_bar
from custom.enikshay.case_utils import CASE_TYPE_PERSON
from custom.enikshay.duplicate_ids import get_cases_with_duplicate_ids
class Command(BaseCommand):
help = """
Finds cases with duplicate IDs and marks all but one of each ID as a duplicate
"""
# TODO what are the headers we need?
logfile_fields = ['name', 'dto_name', 'phi_name', 'owner_id', 'dob', 'phone_number']
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument(
'--commit',
action='store_true',
dest='commit',
default=False,
)
def handle(self, domain, **options):
self.domain = domain
commit = options['commit']
filename = '{}-{}.csv'.format(self.__module__.split('.')[-1],
datetime.datetime.now().strftime('%Y-%m-%d_%H.%M.%S'))
print("Logging actions to {}".format(filename))
with open(filename, 'w') as f:
logfile = csv.DictWriter(f, self.logfile_fields, extrasaction='ignore')
logfile.writeheader()
print("Finding duplicates")
bad_cases = get_cases_with_duplicate_ids(self.domain, CASE_TYPE_PERSON)
print("Processing duplicate cases")
for person_case in with_progress_bar(bad_cases):
updates = self.get_updates(person_case)
case_info = self.get_person_case_info(person_case)
for update in updates:
                    logfile.writerow(dict(case_info, **update))
                if commit:
                    bulk_update_cases(self.domain, updates, self.__module__)
@staticmethod
def get_updates(person_case):
pass
@property
@memoized
def districts_by_id(self):
locs = SQLLocation.objects.filter(domain=self.domain, location_type__code='dto')
return defaultdict(lambda: '', (
(loc.location_id, loc.loc_name) for loc in locs
))
def get_person_case_info(self, person_case):
"""Pull info that we want to log but not update"""
person = person_case.dynamic_case_properties()
return {
'name': ' '.join(filter(None, [person.get('first_name'), person.get('last_name')])),
            'dto_name': self.districts_by_id[person.get('current_address_district_choice')],
'phi_name': person.get('phi'),
'owner_id': person_case.owner_id,
'dob': person.get('dob'),
'phone_number': person.get('contact_phone_number'),
}
|
|
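get_updates is still a stub in this commit, so the command cannot yet run end to end. A sketch of the shape it presumably grows into; the (case_id, properties, close) tuple format assumed for bulk_update_cases and the flag property name are guesses, not taken from the source:
    @staticmethod
    def get_updates(person_case):
        # Hypothetical: one update tuple per case, flagging it as a duplicate
        # without closing it.
        return [(person_case.case_id, {'is_duplicate_id': 'yes'}, False)]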
63ff6313c1200910b749dc8d8488d6c7f2cd9c5f
|
axelrod/tests/unit/test_classification.py
|
axelrod/tests/unit/test_classification.py
|
"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
|
"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be
# `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
def test_multiple_instances(self):
"""Certain instances of classes of strategies will have different
behaviours based on the initialisation variables"""
P1 = axelrod.Joss()
P2 = axelrod.Joss(0)
self.assertNotEqual(P1.behaviour, P2.behaviour)
|
Add a test that checks that different instances can have different behaviour.
|
Add a test that checks that different instances can have different
behaviour.
|
Python
|
mit
|
emmagordon/Axelrod,emmagordon/Axelrod,risicle/Axelrod,kathryncrouch/Axelrod,bootandy/Axelrod,uglyfruitcake/Axelrod,mojones/Axelrod,uglyfruitcake/Axelrod,bootandy/Axelrod,mojones/Axelrod,kathryncrouch/Axelrod,risicle/Axelrod
|
"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
Add a test that checks that different instances can have different
behaviour.
|
"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be
# `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
def test_multiple_instances(self):
"""Certain instances of classes of strategies will have different
behaviours based on the initialisation variables"""
P1 = axelrod.Joss()
P2 = axelrod.Joss(0)
self.assertNotEqual(P1.behaviour, P2.behaviour)
|
<commit_before>"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
<commit_msg>Add a test that checks that different instances can have different
behaviour.<commit_after>
|
"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be
# `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
def test_multiple_instances(self):
"""Certain instances of classes of strategies will have different
behaviours based on the initialisation variables"""
P1 = axelrod.Joss()
P2 = axelrod.Joss(0)
self.assertNotEqual(P1.behaviour, P2.behaviour)
|
"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
Add a test that checks that different instances can have different
behaviour."""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be
# `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
def test_multiple_instances(self):
"""Certain instances of classes of strategies will have different
behaviours based on the initialisation variables"""
P1 = axelrod.Joss()
P2 = axelrod.Joss(0)
self.assertNotEqual(P1.behaviour, P2.behaviour)
|
<commit_before>"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
<commit_msg>Add a test that checks that different instances can have different
behaviour.<commit_after>"""Tests for the classification"""
import unittest
import axelrod
class TestClassification(unittest.TestCase):
def test_known_classifiers(self):
# Grabbing all the strategies: this will be changed to just be
# `axelrod.strategies`
strategies = axelrod.basic_strategies
strategies += axelrod.ordinary_strategies
strategies += axelrod.cheating_strategies
# A set of dimensions that are known to have been fully applied
known_keys = ['stochastic',
'memory_depth',
'inspects_opponent_source',
'manipulates_opponent_source',
'manipulates_opponent_state']
for s in strategies:
s = s()
self.assertTrue(None not in [s.behaviour[key] for key in known_keys])
def test_multiple_instances(self):
"""Certain instances of classes of strategies will have different
behaviours based on the initialisation variables"""
P1 = axelrod.Joss()
P2 = axelrod.Joss(0)
self.assertNotEqual(P1.behaviour, P2.behaviour)
|
1f9df27041dc9a48b7f74840fa6a7248beb55b62
|
web/problems/migrations/0002_add_secret_validator.py
|
web/problems/migrations/0002_add_secret_validator.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import utils
class Migration(migrations.Migration):
dependencies = [
('problems', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='part',
name='secret',
field=models.TextField(default=b'[]', validators=[utils.is_json_string_list]),
preserve_default=True,
),
]
|
Add a missing migration for the secret validator
|
Add a missing migration for the secret validator
|
Python
|
agpl-3.0
|
ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo,ul-fmf/projekt-tomo,matijapretnar/projekt-tomo,matijapretnar/projekt-tomo,ul-fmf/projekt-tomo
|
Add a missing migration for the secret validator
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import utils
class Migration(migrations.Migration):
dependencies = [
('problems', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='part',
name='secret',
field=models.TextField(default=b'[]', validators=[utils.is_json_string_list]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add a missing migration for the secret validator<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import utils
class Migration(migrations.Migration):
dependencies = [
('problems', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='part',
name='secret',
field=models.TextField(default=b'[]', validators=[utils.is_json_string_list]),
preserve_default=True,
),
]
|
Add a missing migration for the secret validator# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import utils
class Migration(migrations.Migration):
dependencies = [
('problems', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='part',
name='secret',
field=models.TextField(default=b'[]', validators=[utils.is_json_string_list]),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Add a missing migration for the secret validator<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import utils
class Migration(migrations.Migration):
dependencies = [
('problems', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='part',
name='secret',
field=models.TextField(default=b'[]', validators=[utils.is_json_string_list]),
preserve_default=True,
),
]
|
|
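Aside: the migration above references utils.is_json_string_list but the validator itself is not shown in this record. A minimal sketch of what such a Django validator might look like; the body below is an illustrative assumption, not the project's actual implementation:
import json
from django.core.exceptions import ValidationError
def is_json_string_list(value):
    # Hypothetical validator: accept only a JSON-encoded list of strings.
    try:
        parsed = json.loads(value)
    except (TypeError, ValueError):
        raise ValidationError('Value is not valid JSON.')
    if not isinstance(parsed, list) or not all(isinstance(item, str) for item in parsed):
        raise ValidationError('Value must be a JSON list of strings.')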
0e8c93177aff661ec7ec37f0a3175f670faefa45
|
openstack/tests/functional/compute/v2/test_server.py
|
openstack/tests/functional/compute/v2/test_server.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.compute.v2 import server
from openstack.tests.functional import base
class TestServer(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestServer, cls).setUpClass()
# TODO(thowe): These values should be able to be set in clouds.yaml
flavor = '4'
image = cls.conn.image.find_image('fedora-20.x86_64')
if image is None:
image = cls.conn.image.find_image('cirros-0.3.4-x86_64-uec')
if image is None:
image = cls.conn.image.images().next()
netid = ''
if netid:
args = {'networks': [{"uuid": netid}]}
else:
args = {}
sot = cls.conn.compute.create_server(
name=cls.NAME, flavor=flavor, image=image.id, **args)
assert isinstance(sot, server.Server)
cls.assertIs(cls.NAME, sot.name)
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.compute.delete_server(cls.ID)
cls.assertIs(None, sot)
def test_find(self):
sot = self.conn.compute.find_server(self.NAME)
self.assertEqual(self.ID, sot.id)
def test_get(self):
sot = self.conn.compute.get_server(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.compute.servers()]
self.assertIn(self.NAME, names)
|
Add functional tests for servers
|
Add functional tests for servers
Change-Id: Iff4101b3d45e04d7f6e25b5f599a7647c8cd2a80
|
Python
|
apache-2.0
|
stackforge/python-openstacksdk,briancurtin/python-openstacksdk,openstack/python-openstacksdk,dtroyer/python-openstacksdk,dudymas/python-openstacksdk,dudymas/python-openstacksdk,mtougeron/python-openstacksdk,briancurtin/python-openstacksdk,stackforge/python-openstacksdk,mtougeron/python-openstacksdk,openstack/python-openstacksdk,dtroyer/python-openstacksdk
|
Add functional tests for servers
Change-Id: Iff4101b3d45e04d7f6e25b5f599a7647c8cd2a80
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.compute.v2 import server
from openstack.tests.functional import base
class TestServer(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestServer, cls).setUpClass()
# TODO(thowe): These values should be able to be set in clouds.yaml
flavor = '4'
image = cls.conn.image.find_image('fedora-20.x86_64')
if image is None:
image = cls.conn.image.find_image('cirros-0.3.4-x86_64-uec')
if image is None:
image = cls.conn.image.images().next()
netid = ''
if netid:
args = {'networks': [{"uuid": netid}]}
else:
args = {}
sot = cls.conn.compute.create_server(
name=cls.NAME, flavor=flavor, image=image.id, **args)
assert isinstance(sot, server.Server)
cls.assertIs(cls.NAME, sot.name)
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.compute.delete_server(cls.ID)
cls.assertIs(None, sot)
def test_find(self):
sot = self.conn.compute.find_server(self.NAME)
self.assertEqual(self.ID, sot.id)
def test_get(self):
sot = self.conn.compute.get_server(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.compute.servers()]
self.assertIn(self.NAME, names)
|
<commit_before><commit_msg>Add functional tests for servers
Change-Id: Iff4101b3d45e04d7f6e25b5f599a7647c8cd2a80<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.compute.v2 import server
from openstack.tests.functional import base
class TestServer(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestServer, cls).setUpClass()
# TODO(thowe): These values should be able to be set in clouds.yaml
flavor = '4'
image = cls.conn.image.find_image('fedora-20.x86_64')
if image is None:
image = cls.conn.image.find_image('cirros-0.3.4-x86_64-uec')
if image is None:
image = cls.conn.image.images().next()
netid = ''
if netid:
args = {'networks': [{"uuid": netid}]}
else:
args = {}
sot = cls.conn.compute.create_server(
name=cls.NAME, flavor=flavor, image=image.id, **args)
assert isinstance(sot, server.Server)
cls.assertIs(cls.NAME, sot.name)
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.compute.delete_server(cls.ID)
cls.assertIs(None, sot)
def test_find(self):
sot = self.conn.compute.find_server(self.NAME)
self.assertEqual(self.ID, sot.id)
def test_get(self):
sot = self.conn.compute.get_server(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.compute.servers()]
self.assertIn(self.NAME, names)
|
Add functional tests for servers
Change-Id: Iff4101b3d45e04d7f6e25b5f599a7647c8cd2a80# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.compute.v2 import server
from openstack.tests.functional import base
class TestServer(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestServer, cls).setUpClass()
# TODO(thowe): These values should be able to be set in clouds.yaml
flavor = '4'
image = cls.conn.image.find_image('fedora-20.x86_64')
if image is None:
image = cls.conn.image.find_image('cirros-0.3.4-x86_64-uec')
if image is None:
image = cls.conn.image.images().next()
netid = ''
if netid:
args = {'networks': [{"uuid": netid}]}
else:
args = {}
sot = cls.conn.compute.create_server(
name=cls.NAME, flavor=flavor, image=image.id, **args)
assert isinstance(sot, server.Server)
cls.assertIs(cls.NAME, sot.name)
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.compute.delete_server(cls.ID)
cls.assertIs(None, sot)
def test_find(self):
sot = self.conn.compute.find_server(self.NAME)
self.assertEqual(self.ID, sot.id)
def test_get(self):
sot = self.conn.compute.get_server(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.compute.servers()]
self.assertIn(self.NAME, names)
|
<commit_before><commit_msg>Add functional tests for servers
Change-Id: Iff4101b3d45e04d7f6e25b5f599a7647c8cd2a80<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.compute.v2 import server
from openstack.tests.functional import base
class TestServer(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestServer, cls).setUpClass()
# TODO(thowe): These values should be able to be set in clouds.yaml
flavor = '4'
image = cls.conn.image.find_image('fedora-20.x86_64')
if image is None:
image = cls.conn.image.find_image('cirros-0.3.4-x86_64-uec')
if image is None:
image = cls.conn.image.images().next()
netid = ''
if netid:
args = {'networks': [{"uuid": netid}]}
else:
args = {}
sot = cls.conn.compute.create_server(
name=cls.NAME, flavor=flavor, image=image.id, **args)
assert isinstance(sot, server.Server)
cls.assertIs(cls.NAME, sot.name)
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.compute.delete_server(cls.ID)
cls.assertIs(None, sot)
def test_find(self):
sot = self.conn.compute.find_server(self.NAME)
self.assertEqual(self.ID, sot.id)
def test_get(self):
sot = self.conn.compute.get_server(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.compute.servers()]
self.assertIn(self.NAME, names)
|
|
b50a1859d09a9172225a21a96748bfdf9a13515f
|
indra/tests/test_uniprot_client.py
|
indra/tests/test_uniprot_client.py
|
from indra.databases import uniprot_client
def test_query_protein_exists():
g = uniprot_client.query_protein('P00533')
assert(g is not None)
def test_query_protein_nonexist():
g = uniprot_client.query_protein('XXXX')
assert(g is None)
def test_get_family_members():
members = uniprot_client.get_family_members('RAF')
assert('ARAF' in members)
assert('BRAF' in members)
assert('RAF1' in members)
def test_get_hgnc_name_human():
g = uniprot_client.query_protein('P00533')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name == 'EGFR')
def test_get_hgnc_name_nonhuman():
g = uniprot_client.query_protein('P31938')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name is None)
def test_get_gene_name_human():
g = uniprot_client.query_protein('P00533')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'EGFR')
def test_get_gene_name_nonhuman():
g = uniprot_client.query_protein('P31938')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'Map2k1')
def test_get_sequence():
g = uniprot_client.query_protein('P00533')
seq = uniprot_client.get_sequence(g)
assert(len(seq) > 1000)
def test_get_modifications():
g = uniprot_client.query_protein('P27361')
mods = uniprot_client.get_modifications(g)
assert(('Phosphothreonine', 202) in mods)
assert(('Phosphotyrosine', 204) in mods)
def test_verify_location():
g = uniprot_client.query_protein('P27361')
assert(uniprot_client.verify_location(g, 'T', 202))
assert(not uniprot_client.verify_location(g, 'S', 202))
assert(not uniprot_client.verify_location(g, 'T', -1))
assert(not uniprot_client.verify_location(g, 'T', 10000))
|
Add tests for UniProt client
|
Add tests for UniProt client
|
Python
|
bsd-2-clause
|
johnbachman/belpy,jmuhlich/indra,johnbachman/belpy,pvtodorov/indra,jmuhlich/indra,sorgerlab/belpy,pvtodorov/indra,bgyori/indra,jmuhlich/indra,sorgerlab/indra,bgyori/indra,johnbachman/indra,sorgerlab/indra,bgyori/indra,sorgerlab/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,johnbachman/belpy
|
Add tests for UniProt client
|
from indra.databases import uniprot_client
def test_query_protein_exists():
g = uniprot_client.query_protein('P00533')
assert(g is not None)
def test_query_protein_nonexist():
g = uniprot_client.query_protein('XXXX')
assert(g is None)
def test_get_family_members():
members = uniprot_client.get_family_members('RAF')
assert('ARAF' in members)
assert('BRAF' in members)
assert('RAF1' in members)
def test_get_hgnc_name_human():
g = uniprot_client.query_protein('P00533')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name == 'EGFR')
def test_get_hgnc_name_nonhuman():
g = uniprot_client.query_protein('P31938')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name is None)
def test_get_gene_name_human():
g = uniprot_client.query_protein('P00533')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'EGFR')
def test_get_gene_name_nonhuman():
g = uniprot_client.query_protein('P31938')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'Map2k1')
def test_get_sequence():
g = uniprot_client.query_protein('P00533')
seq = uniprot_client.get_sequence(g)
assert(len(seq) > 1000)
def test_get_modifications():
g = uniprot_client.query_protein('P27361')
mods = uniprot_client.get_modifications(g)
assert(('Phosphothreonine', 202) in mods)
assert(('Phosphotyrosine', 204) in mods)
def test_verify_location():
g = uniprot_client.query_protein('P27361')
assert(uniprot_client.verify_location(g, 'T', 202))
assert(not uniprot_client.verify_location(g, 'S', 202))
assert(not uniprot_client.verify_location(g, 'T', -1))
assert(not uniprot_client.verify_location(g, 'T', 10000))
|
<commit_before><commit_msg>Add tests for UniProt client<commit_after>
|
from indra.databases import uniprot_client
def test_query_protein_exists():
g = uniprot_client.query_protein('P00533')
assert(g is not None)
def test_query_protein_nonexist():
g = uniprot_client.query_protein('XXXX')
assert(g is None)
def test_get_family_members():
members = uniprot_client.get_family_members('RAF')
assert('ARAF' in members)
assert('BRAF' in members)
assert('RAF1' in members)
def test_get_hgnc_name_human():
g = uniprot_client.query_protein('P00533')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name == 'EGFR')
def test_get_hgnc_name_nonhuman():
g = uniprot_client.query_protein('P31938')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name is None)
def test_get_gene_name_human():
g = uniprot_client.query_protein('P00533')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'EGFR')
def test_get_gene_name_nonhuman():
g = uniprot_client.query_protein('P31938')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'Map2k1')
def test_get_sequence():
g = uniprot_client.query_protein('P00533')
seq = uniprot_client.get_sequence(g)
assert(len(seq) > 1000)
def test_get_modifications():
g = uniprot_client.query_protein('P27361')
mods = uniprot_client.get_modifications(g)
assert(('Phosphothreonine', 202) in mods)
assert(('Phosphotyrosine', 204) in mods)
def test_verify_location():
g = uniprot_client.query_protein('P27361')
assert(uniprot_client.verify_location(g, 'T', 202))
assert(not uniprot_client.verify_location(g, 'S', 202))
assert(not uniprot_client.verify_location(g, 'T', -1))
assert(not uniprot_client.verify_location(g, 'T', 10000))
|
Add tests for UniProt clientfrom indra.databases import uniprot_client
def test_query_protein_exists():
g = uniprot_client.query_protein('P00533')
assert(g is not None)
def test_query_protein_nonexist():
g = uniprot_client.query_protein('XXXX')
assert(g is None)
def test_get_family_members():
members = uniprot_client.get_family_members('RAF')
assert('ARAF' in members)
assert('BRAF' in members)
assert('RAF1' in members)
def test_get_hgnc_name_human():
g = uniprot_client.query_protein('P00533')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name == 'EGFR')
def test_get_hgnc_name_nonhuman():
g = uniprot_client.query_protein('P31938')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name is None)
def test_get_gene_name_human():
g = uniprot_client.query_protein('P00533')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'EGFR')
def test_get_gene_name_nonhuman():
g = uniprot_client.query_protein('P31938')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'Map2k1')
def test_get_sequence():
g = uniprot_client.query_protein('P00533')
seq = uniprot_client.get_sequence(g)
assert(len(seq) > 1000)
def test_get_modifications():
g = uniprot_client.query_protein('P27361')
mods = uniprot_client.get_modifications(g)
assert(('Phosphothreonine', 202) in mods)
assert(('Phosphotyrosine', 204) in mods)
def test_verify_location():
g = uniprot_client.query_protein('P27361')
assert(uniprot_client.verify_location(g, 'T', 202))
assert(not uniprot_client.verify_location(g, 'S', 202))
assert(not uniprot_client.verify_location(g, 'T', -1))
assert(not uniprot_client.verify_location(g, 'T', 10000))
|
<commit_before><commit_msg>Add tests for UniProt client<commit_after>from indra.databases import uniprot_client
def test_query_protein_exists():
g = uniprot_client.query_protein('P00533')
assert(g is not None)
def test_query_protein_nonexist():
g = uniprot_client.query_protein('XXXX')
assert(g is None)
def test_get_family_members():
members = uniprot_client.get_family_members('RAF')
assert('ARAF' in members)
assert('BRAF' in members)
assert('RAF1' in members)
def test_get_hgnc_name_human():
g = uniprot_client.query_protein('P00533')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name == 'EGFR')
def test_get_hgnc_name_nonhuman():
g = uniprot_client.query_protein('P31938')
hgnc_name = uniprot_client.get_hgnc_name(g)
assert(hgnc_name is None)
def test_get_gene_name_human():
g = uniprot_client.query_protein('P00533')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'EGFR')
def test_get_gene_name_nonhuman():
g = uniprot_client.query_protein('P31938')
gene_name = uniprot_client.get_gene_name(g)
assert(gene_name == 'Map2k1')
def test_get_sequence():
g = uniprot_client.query_protein('P00533')
seq = uniprot_client.get_sequence(g)
assert(len(seq) > 1000)
def test_get_modifications():
g = uniprot_client.query_protein('P27361')
mods = uniprot_client.get_modifications(g)
assert(('Phosphothreonine', 202) in mods)
assert(('Phosphotyrosine', 204) in mods)
def test_verify_location():
g = uniprot_client.query_protein('P27361')
assert(uniprot_client.verify_location(g, 'T', 202))
assert(not uniprot_client.verify_location(g, 'S', 202))
assert(not uniprot_client.verify_location(g, 'T', -1))
assert(not uniprot_client.verify_location(g, 'T', 10000))
|
|
f7f19b5074fa28431f8d5fc4d3284f61542423fd
|
scripts/migration/migrate_index_for_existing_files.py
|
scripts/migration/migrate_index_for_existing_files.py
|
"""
Saves every file to have new save() logic index those files.
"""
import sys
import logging
from website.app import init_app
from website.files.models.osfstorage import OsfStorageFile
logger = logging.getLogger(__name__)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
logger.warn('Current files will now be updated to be indexed if necessary')
if dry_run:
logger.warn('Dry_run mode')
for file in OsfStorageFile.find():
logger.info('File with _id {0} and name {1} has been saved.'.format(file._id, file.name))
if not dry_run:
file.save()
if __name__ == '__main__':
main()
|
Add migration to save every file so that it can be indexed
|
Add migration to save every file so that it can be indexed
|
Python
|
apache-2.0
|
pattisdr/osf.io,zamattiac/osf.io,mluo613/osf.io,ZobairAlijan/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io,chrisseto/osf.io,saradbowman/osf.io,saradbowman/osf.io,pattisdr/osf.io,aaxelb/osf.io,zachjanicki/osf.io,wearpants/osf.io,zachjanicki/osf.io,mfraezz/osf.io,kwierman/osf.io,TomHeatwole/osf.io,laurenrevere/osf.io,caseyrollins/osf.io,mluo613/osf.io,Johnetordoff/osf.io,RomanZWang/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,zachjanicki/osf.io,mluo613/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,abought/osf.io,DanielSBrown/osf.io,ZobairAlijan/osf.io,caseyrygt/osf.io,zamattiac/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,crcresearch/osf.io,crcresearch/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,aaxelb/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,leb2dg/osf.io,GageGaskins/osf.io,jnayak1/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,TomBaxter/osf.io,leb2dg/osf.io,acshi/osf.io,sloria/osf.io,binoculars/osf.io,asanfilippo7/osf.io,felliott/osf.io,emetsger/osf.io,TomBaxter/osf.io,GageGaskins/osf.io,jnayak1/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,DanielSBrown/osf.io,samanehsan/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,KAsante95/osf.io,billyhunt/osf.io,TomHeatwole/osf.io,KAsante95/osf.io,billyhunt/osf.io,caseyrygt/osf.io,doublebits/osf.io,cslzchen/osf.io,chennan47/osf.io,erinspace/osf.io,caseyrygt/osf.io,mluke93/osf.io,erinspace/osf.io,caneruguz/osf.io,rdhyee/osf.io,samanehsan/osf.io,ticklemepierce/osf.io,haoyuchen1992/osf.io,kwierman/osf.io,crcresearch/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,cwisecarver/osf.io,aaxelb/osf.io,DanielSBrown/osf.io,emetsger/osf.io,hmoco/osf.io,jnayak1/osf.io,samchrisinger/osf.io,caseyrollins/osf.io,Nesiehr/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,kch8qx/osf.io,caneruguz/osf.io,wearpants/osf.io,cslzchen/osf.io,doublebits/osf.io,RomanZWang/osf.io,caneruguz/osf.io,hmoco/osf.io,billyhunt/osf.io,mluke93/osf.io,crcresearch/osf.io,doublebits/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,danielneis/osf.io,mluke93/osf.io,ZobairAlijan/osf.io,doublebits/osf.io,felliott/osf.io,jnayak1/osf.io,emetsger/osf.io,Ghalko/osf.io,acshi/osf.io,sloria/osf.io,cwisecarver/osf.io,mfraezz/osf.io,mluke93/osf.io,alexschiller/osf.io,brianjgeiger/osf.io,mluo613/osf.io,icereval/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,laurenrevere/osf.io,amyshi188/osf.io,felliott/osf.io,caneruguz/osf.io,hmoco/osf.io,mfraezz/osf.io,cslzchen/osf.io,rdhyee/osf.io,haoyuchen1992/osf.io,GageGaskins/osf.io,TomHeatwole/osf.io,kch8qx/osf.io,rdhyee/osf.io,zamattiac/osf.io,sloria/osf.io,GageGaskins/osf.io,abought/osf.io,samanehsan/osf.io,adlius/osf.io,samchrisinger/osf.io,abought/osf.io,monikagrabowska/osf.io,chennan47/osf.io,erinspace/osf.io,mluo613/osf.io,danielneis/osf.io,brandonPurvis/osf.io,RomanZWang/osf.io,KAsante95/osf.io,billyhunt/osf.io,doublebits/osf.io,KAsante95/osf.io,samchrisinger/osf.io,danielneis/osf.io,wearpants/osf.io,RomanZWang/osf.io,wearpants/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,KAsante95/osf.io,baylee-d/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,adlius/osf.io,mattclark/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,Ghalko/osf.io,amyshi188/osf.io,leb2dg/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,binoculars/osf.io,SSJohns/osf.io,billyhunt/osf.io,samchrisinger/osf.io,acshi/osf.io,chrisseto/osf.io,hmoco/osf.io,aaxelb/osf.io,ticklemepierce/osf.io,HalcyonChimera/osf.io,Ghalko/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,chennan47/osf.io,adlius/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,samanehsan/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,SSJohns/osf.io,kwierman/osf.io,alexschiller/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,zamattiac/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,icereval/osf.io,pattisdr/osf.io,kch8qx/osf.io,danielneis/osf.io,chrisseto/osf.io,Ghalko/osf.io,acshi/osf.io,Nesiehr/osf.io,rdhyee/osf.io,abought/osf.io,ticklemepierce/osf.io,alexschiller/osf.io,brandonPurvis/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,laurenrevere/osf.io,SSJohns/osf.io
|
Add migration to save every file so that it can be indexed
|
"""
Saves every file to have new save() logic index those files.
"""
import sys
import logging
from website.app import init_app
from website.files.models.osfstorage import OsfStorageFile
logger = logging.getLogger(__name__)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
logger.warn('Current files will now be updated to be indexed if necessary')
if dry_run:
logger.warn('Dry_run mode')
for file in OsfStorageFile.find():
logger.info('File with _id {0} and name {1} has been saved.'.format(file._id, file.name))
if not dry_run:
file.save()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add migration to save every file so that it can be indexed<commit_after>
|
"""
Saves every file to have new save() logic index those files.
"""
import sys
import logging
from website.app import init_app
from website.files.models.osfstorage import OsfStorageFile
logger = logging.getLogger(__name__)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
logger.warn('Current files will now be updated to be indexed if necessary')
if dry_run:
logger.warn('Dry_run mode')
for file in OsfStorageFile.find():
logger.info('File with _id {0} and name {1} has been saved.'.format(file._id, file.name))
if not dry_run:
file.save()
if __name__ == '__main__':
main()
|
Add migration to save every file so that it can be indexed"""
Saves every file to have new save() logic index those files.
"""
import sys
import logging
from website.app import init_app
from website.files.models.osfstorage import OsfStorageFile
logger = logging.getLogger(__name__)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
logger.warn('Current files will now be updated to be indexed if necessary')
if dry_run:
logger.warn('Dry_run mode')
for file in OsfStorageFile.find():
logger.info('File with _id {0} and name {1} has been saved.'.format(file._id, file.name))
if not dry_run:
file.save()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add migration to save every file so that it can be indexed<commit_after>"""
Saves every file to have new save() logic index those files.
"""
import sys
import logging
from website.app import init_app
from website.files.models.osfstorage import OsfStorageFile
logger = logging.getLogger(__name__)
def main():
init_app(routes=False)
dry_run = 'dry' in sys.argv
logger.warn('Current files will now be updated to be indexed if necessary')
if dry_run:
logger.warn('Dry_run mode')
for file in OsfStorageFile.find():
logger.info('File with _id {0} and name {1} has been saved.'.format(file._id, file.name))
if not dry_run:
file.save()
if __name__ == '__main__':
main()
|
|
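Aside: Logger.warn(), as used in the script above, is a deprecated alias for Logger.warning() in the Python standard library. An equivalent, non-deprecated form of those calls would be:
import logging
logger = logging.getLogger(__name__)
# Same messages as in the migration script, via the supported method name.
logger.warning('Current files will now be updated to be indexed if necessary')
logger.warning('Dry_run mode')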
2ba2d08804280cfdfa2c2ae6ca516fb68dd98ea7
|
marco/marco/wsgi_web410.py
|
marco/marco/wsgi_web410.py
|
"""WSGI File for Web410
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "marco.settings")
activate_this = '/home/point97/env/marco_portal2/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
Add wsgi file for web410
|
MP-795: Add wsgi file for web410
|
Python
|
isc
|
Ecotrust/marineplanner-core,Ecotrust/marineplanner-core,MidAtlanticPortal/marco-portal2,Ecotrust/marineplanner-core,Ecotrust/marineplanner-core,Ecotrust/marineplanner-core,MidAtlanticPortal/marco-portal2,MidAtlanticPortal/marco-portal2,MidAtlanticPortal/marco-portal2
|
MP-795: Add wsgi file for web410
|
"""WSGI File for Web410
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "marco.settings")
activate_this = '/home/point97/env/marco_portal2/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
<commit_before><commit_msg>MP-795: Add wsgi file for web410<commit_after>
|
"""WSGI File for Web410
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "marco.settings")
activate_this = '/home/point97/env/marco_portal2/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
MP-795: Add wsgi file for web410"""WSGI File for Web410
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "marco.settings")
activate_this = '/home/point97/env/marco_portal2/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
<commit_before><commit_msg>MP-795: Add wsgi file for web410<commit_after>"""WSGI File for Web410
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "marco.settings")
activate_this = '/home/point97/env/marco_portal2/bin/activate_this.py'
execfile(activate_this, dict(__file__=activate_this))
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
|
213665bceffad8919986e889882e6ebab1dedafc
|
temba/flows/migrations/0096_populate_flownodecount.py
|
temba/flows/migrations/0096_populate_flownodecount.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-30 12:50
from __future__ import unicode_literals
from django.db import migrations
def do_populate(FlowStep, FlowNodeCount):
nodes = list(FlowStep.objects.filter(left_on=None, run__is_active=True).distinct('step_uuid').values_list('run__flow_id', 'step_uuid'))
if nodes:
print("Fetched %d node UUIDs with active contacts" % len(nodes))
counts = []
for flow_id, node_uuid in nodes:
contact_count = FlowStep.objects.filter(step_uuid=node_uuid, left_on=None, run__is_active=True).count()
FlowNodeCount.objects.filter(flow_id=flow_id, node_uuid=node_uuid).delete()
counts.append(FlowNodeCount(flow_id=flow_id, node_uuid=node_uuid, count=contact_count))
FlowNodeCount.objects.bulk_create(counts, batch_size=5000)
def populate_flownodecount(apps, schema_editor):
FlowStep = apps.get_model('flows', 'FlowStep')
FlowNodeCount = apps.get_model('flows', 'FlowNodeCount')
do_populate(FlowStep, FlowNodeCount)
def apply_manual():
from temba.flows.models import FlowStep, FlowNodeCount
do_populate(FlowStep, FlowNodeCount)
class Migration(migrations.Migration):
dependencies = [
('flows', '0095_clear_old_flow_stat_cache'),
]
operations = [
migrations.RunPython(populate_flownodecount)
]
|
Add migration to populate FlowNodeCount
|
Add migration to populate FlowNodeCount
|
Python
|
agpl-3.0
|
pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro
|
Add migration to populate FlowNodeCount
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-30 12:50
from __future__ import unicode_literals
from django.db import migrations
def do_populate(FlowStep, FlowNodeCount):
nodes = list(FlowStep.objects.filter(left_on=None, run__is_active=True).distinct('step_uuid').values_list('run__flow_id', 'step_uuid'))
if nodes:
print("Fetched %d node UUIDs with active contacts" % len(nodes))
counts = []
for flow_id, node_uuid in nodes:
contact_count = FlowStep.objects.filter(step_uuid=node_uuid, left_on=None, run__is_active=True).count()
FlowNodeCount.objects.filter(flow_id=flow_id, node_uuid=node_uuid).delete()
counts.append(FlowNodeCount(flow_id=flow_id, node_uuid=node_uuid, count=contact_count))
FlowNodeCount.objects.bulk_create(counts, batch_size=5000)
def populate_flownodecount(apps, schema_editor):
FlowStep = apps.get_model('flows', 'FlowStep')
FlowNodeCount = apps.get_model('flows', 'FlowNodeCount')
do_populate(FlowStep, FlowNodeCount)
def apply_manual():
from temba.flows.models import FlowStep, FlowNodeCount
do_populate(FlowStep, FlowNodeCount)
class Migration(migrations.Migration):
dependencies = [
('flows', '0095_clear_old_flow_stat_cache'),
]
operations = [
migrations.RunPython(populate_flownodecount)
]
|
<commit_before><commit_msg>Add migration to populate FlowNodeCount<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-30 12:50
from __future__ import unicode_literals
from django.db import migrations
def do_populate(FlowStep, FlowNodeCount):
nodes = list(FlowStep.objects.filter(left_on=None, run__is_active=True).distinct('step_uuid').values_list('run__flow_id', 'step_uuid'))
if nodes:
print("Fetched %d node UUIDs with active contacts" % len(nodes))
counts = []
for flow_id, node_uuid in nodes:
contact_count = FlowStep.objects.filter(step_uuid=node_uuid, left_on=None, run__is_active=True).count()
FlowNodeCount.objects.filter(flow_id=flow_id, node_uuid=node_uuid).delete()
counts.append(FlowNodeCount(flow_id=flow_id, node_uuid=node_uuid, count=contact_count))
FlowNodeCount.objects.bulk_create(counts, batch_size=5000)
def populate_flownodecount(apps, schema_editor):
FlowStep = apps.get_model('flows', 'FlowStep')
FlowNodeCount = apps.get_model('flows', 'FlowNodeCount')
do_populate(FlowStep, FlowNodeCount)
def apply_manual():
from temba.flows.models import FlowStep, FlowNodeCount
do_populate(FlowStep, FlowNodeCount)
class Migration(migrations.Migration):
dependencies = [
('flows', '0095_clear_old_flow_stat_cache'),
]
operations = [
migrations.RunPython(populate_flownodecount)
]
|
Add migration to populate FlowNodeCount# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-30 12:50
from __future__ import unicode_literals
from django.db import migrations
def do_populate(FlowStep, FlowNodeCount):
nodes = list(FlowStep.objects.filter(left_on=None, run__is_active=True).distinct('step_uuid').values_list('run__flow_id', 'step_uuid'))
if nodes:
print("Fetched %d node UUIDs with active contacts" % len(nodes))
counts = []
for flow_id, node_uuid in nodes:
contact_count = FlowStep.objects.filter(step_uuid=node_uuid, left_on=None, run__is_active=True).count()
FlowNodeCount.objects.filter(flow_id=flow_id, node_uuid=node_uuid).delete()
counts.append(FlowNodeCount(flow_id=flow_id, node_uuid=node_uuid, count=contact_count))
FlowNodeCount.objects.bulk_create(counts, batch_size=5000)
def populate_flownodecount(apps, schema_editor):
FlowStep = apps.get_model('flows', 'FlowStep')
FlowNodeCount = apps.get_model('flows', 'FlowNodeCount')
do_populate(FlowStep, FlowNodeCount)
def apply_manual():
from temba.flows.models import FlowStep, FlowNodeCount
do_populate(FlowStep, FlowNodeCount)
class Migration(migrations.Migration):
dependencies = [
('flows', '0095_clear_old_flow_stat_cache'),
]
operations = [
migrations.RunPython(populate_flownodecount)
]
|
<commit_before><commit_msg>Add migration to populate FlowNodeCount<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-03-30 12:50
from __future__ import unicode_literals
from django.db import migrations
def do_populate(FlowStep, FlowNodeCount):
nodes = list(FlowStep.objects.filter(left_on=None, run__is_active=True).distinct('step_uuid').values_list('run__flow_id', 'step_uuid'))
if nodes:
print("Fetched %d node UUIDs with active contacts" % len(nodes))
counts = []
for flow_id, node_uuid in nodes:
contact_count = FlowStep.objects.filter(step_uuid=node_uuid, left_on=None, run__is_active=True).count()
FlowNodeCount.objects.filter(flow_id=flow_id, node_uuid=node_uuid).delete()
counts.append(FlowNodeCount(flow_id=flow_id, node_uuid=node_uuid, count=contact_count))
FlowNodeCount.objects.bulk_create(counts, batch_size=5000)
def populate_flownodecount(apps, schema_editor):
FlowStep = apps.get_model('flows', 'FlowStep')
FlowNodeCount = apps.get_model('flows', 'FlowNodeCount')
do_populate(FlowStep, FlowNodeCount)
def apply_manual():
from temba.flows.models import FlowStep, FlowNodeCount
do_populate(FlowStep, FlowNodeCount)
class Migration(migrations.Migration):
dependencies = [
('flows', '0095_clear_old_flow_stat_cache'),
]
operations = [
migrations.RunPython(populate_flownodecount)
]
|
|
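Aside: the apply_manual() helper above lets the backfill be re-run outside the migration framework, for example from a Django shell. Because Django migration modules begin with digits, a plain import statement cannot name them; a sketch of loading this one by its dotted path (the path is assumed from the file name in this record):
import importlib
# Import the migration module dynamically, then run the backfill by hand.
mig = importlib.import_module('temba.flows.migrations.0096_populate_flownodecount')
mig.apply_manual()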
f40a03747fd50afaa13c35cf5623540cb22517ec
|
timeside/server/migrations/0007_auto_20160705_1342.py
|
timeside/server/migrations/0007_auto_20160705_1342.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-05 11:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0006_analysistrack'),
]
operations = [
migrations.AlterField(
model_name='analysis',
name='preset',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.Preset', verbose_name='preset'),
),
migrations.AlterField(
model_name='analysis',
name='sub_processor',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.SubProcessor', verbose_name='sub_processor'),
),
migrations.AlterField(
model_name='analysistrack',
name='analysis',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='server.Analysis', verbose_name='analysis'),
),
migrations.AlterField(
model_name='analysistrack',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis_tracks', to='server.Item', verbose_name='item'),
),
]
|
Add missing migrations corresponding to 5468b2e57366bc0
|
Add missing migrations corresponding to 5468b2e57366bc0
|
Python
|
agpl-3.0
|
Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide
|
Add missing migrations corresponding to 5468b2e57366bc0
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-05 11:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0006_analysistrack'),
]
operations = [
migrations.AlterField(
model_name='analysis',
name='preset',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.Preset', verbose_name='preset'),
),
migrations.AlterField(
model_name='analysis',
name='sub_processor',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.SubProcessor', verbose_name='sub_processor'),
),
migrations.AlterField(
model_name='analysistrack',
name='analysis',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='server.Analysis', verbose_name='analysis'),
),
migrations.AlterField(
model_name='analysistrack',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis_tracks', to='server.Item', verbose_name='item'),
),
]
|
<commit_before><commit_msg>Add missing migrations corresponding to 5468b2e57366bc0<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-05 11:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0006_analysistrack'),
]
operations = [
migrations.AlterField(
model_name='analysis',
name='preset',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.Preset', verbose_name='preset'),
),
migrations.AlterField(
model_name='analysis',
name='sub_processor',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.SubProcessor', verbose_name='sub_processor'),
),
migrations.AlterField(
model_name='analysistrack',
name='analysis',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='server.Analysis', verbose_name='analysis'),
),
migrations.AlterField(
model_name='analysistrack',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis_tracks', to='server.Item', verbose_name='item'),
),
]
|
Add missing migrations corresponding to 5468b2e57366bc0# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-05 11:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0006_analysistrack'),
]
operations = [
migrations.AlterField(
model_name='analysis',
name='preset',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.Preset', verbose_name='preset'),
),
migrations.AlterField(
model_name='analysis',
name='sub_processor',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.SubProcessor', verbose_name='sub_processor'),
),
migrations.AlterField(
model_name='analysistrack',
name='analysis',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='server.Analysis', verbose_name='analysis'),
),
migrations.AlterField(
model_name='analysistrack',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis_tracks', to='server.Item', verbose_name='item'),
),
]
|
<commit_before><commit_msg>Add missing migrations corresponding to 5468b2e57366bc0<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-07-05 11:42
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0006_analysistrack'),
]
operations = [
migrations.AlterField(
model_name='analysis',
name='preset',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.Preset', verbose_name='preset'),
),
migrations.AlterField(
model_name='analysis',
name='sub_processor',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis', to='server.SubProcessor', verbose_name='sub_processor'),
),
migrations.AlterField(
model_name='analysistrack',
name='analysis',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='tracks', to='server.Analysis', verbose_name='analysis'),
),
migrations.AlterField(
model_name='analysistrack',
name='item',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='analysis_tracks', to='server.Item', verbose_name='item'),
),
]
|
|
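Aside: the AlterField operations above only change related_name, verbose_name, and on_delete, all of which live in Django's model state rather than in the database schema, so this migration should emit no schema-altering SQL. That can be checked with sqlmigrate, e.g.:
# Expected to print no ALTER statements (state-only changes), assuming the
# app label is 'server' as in the dependencies above:
#     python manage.py sqlmigrate server 0007_auto_20160705_1342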
140f0da41966db7e4a932ee917961c9d27eab87f
|
src/ggrc/migrations/versions/20160203143912_6bed0575a0b_migrate_assessment_to_assignable_mixin.py
|
src/ggrc/migrations/versions/20160203143912_6bed0575a0b_migrate_assessment_to_assignable_mixin.py
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate Assessment to Assignable mixin
Revision ID: 6bed0575a0b
Revises: 262bbe790f4c
Create Date: 2016-02-03 14:39:12.737518
"""
# Disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=C0103
from alembic import op
# revision identifiers, used by Alembic.
revision = '6bed0575a0b'
down_revision = '262bbe790f4c'
def upgrade():
op.execute("""
INSERT INTO relationships (
modified_by_id, created_at, updated_at,
source_id, source_type, destination_id, destination_type,
context_id
)
SELECT
oo.modified_by_id, NOW(), NOW(),
oo.ownable_id, oo.ownable_type, oo.person_id, 'Person',
oo.context_id
FROM object_owners AS oo
WHERE oo.ownable_type = 'Assessment'
""")
op.execute("""
INSERT INTO relationship_attrs (
relationship_id, attr_name, attr_value
)
SELECT r.id, 'AssigneeType', 'Creator,Assessor'
FROM relationships AS r
WHERE r.source_type = 'Assessment';
""")
op.execute("""UPDATE assessments SET status = 'Open'""")
op.execute("""ALTER TABLE assessments
CHANGE status status
ENUM("Open","In Progress","Finished","Verified","Final")
DEFAULT "Open"
NOT NULL
""")
def downgrade():
pass
|
Add migration for assignable mixin
|
Add migration for assignable mixin
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core
|
Add migration for assignable mixin
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate Assessment to Assignable mixin
Revision ID: 6bed0575a0b
Revises: 262bbe790f4c
Create Date: 2016-02-03 14:39:12.737518
"""
# Disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=C0103
from alembic import op
# revision identifiers, used by Alembic.
revision = '6bed0575a0b'
down_revision = '262bbe790f4c'
def upgrade():
op.execute("""
INSERT INTO relationships (
modified_by_id, created_at, updated_at,
source_id, source_type, destination_id, destination_type,
context_id
)
SELECT
oo.modified_by_id, NOW(), NOW(),
oo.ownable_id, oo.ownable_type, oo.person_id, 'Person',
oo.context_id
FROM object_owners AS oo
WHERE oo.ownable_type = 'Assessment'
""")
op.execute("""
INSERT INTO relationship_attrs (
relationship_id, attr_name, attr_value
)
SELECT r.id, 'AssigneeType', 'Creator,Assessor'
FROM relationships AS r
WHERE r.source_type = 'Assessment';
""")
op.execute("""UPDATE assessments SET status = 'Open'""")
op.execute("""ALTER TABLE assessments
CHANGE status status
ENUM("Open","In Progress","Finished","Verified","Final")
DEFAULT "Open"
NOT NULL
""")
def downgrade():
pass
|
<commit_before><commit_msg>Add migration for assignable mixin<commit_after>
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate Assessment to Assignable mixin
Revision ID: 6bed0575a0b
Revises: 262bbe790f4c
Create Date: 2016-02-03 14:39:12.737518
"""
# Disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=C0103
from alembic import op
# revision identifiers, used by Alembic.
revision = '6bed0575a0b'
down_revision = '262bbe790f4c'
def upgrade():
op.execute("""
INSERT INTO relationships (
modified_by_id, created_at, updated_at,
source_id, source_type, destination_id, destination_type,
context_id
)
SELECT
oo.modified_by_id, NOW(), NOW(),
oo.ownable_id, oo.ownable_type, oo.person_id, 'Person',
oo.context_id
FROM object_owners AS oo
WHERE oo.ownable_type = 'Assessment'
""")
op.execute("""
INSERT INTO relationship_attrs (
relationship_id, attr_name, attr_value
)
SELECT r.id, 'AssigneeType', 'Creator,Assessor'
FROM relationships AS r
WHERE r.source_type = 'Assessment';
""")
op.execute("""UPDATE assessments SET status = 'Open'""")
op.execute("""ALTER TABLE assessments
CHANGE status status
ENUM("Open","In Progress","Finished","Verified","Final")
DEFAULT "Open"
NOT NULL
""")
def downgrade():
pass
|
Add migration for assignable mixin# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate Assessment to Assignable mixin
Revision ID: 6bed0575a0b
Revises: 262bbe790f4c
Create Date: 2016-02-03 14:39:12.737518
"""
# Disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=C0103
from alembic import op
# revision identifiers, used by Alembic.
revision = '6bed0575a0b'
down_revision = '262bbe790f4c'
def upgrade():
op.execute("""
INSERT INTO relationships (
modified_by_id, created_at, updated_at,
source_id, source_type, destination_id, destination_type,
context_id
)
SELECT
oo.modified_by_id, NOW(), NOW(),
oo.ownable_id, oo.ownable_type, oo.person_id, 'Person',
oo.context_id
FROM object_owners AS oo
WHERE oo.ownable_type = 'Assessment'
""")
op.execute("""
INSERT INTO relationship_attrs (
relationship_id, attr_name, attr_value
)
SELECT r.id, 'AssigneeType', 'Creator,Assessor'
FROM relationships AS r
WHERE r.source_type = 'Assessment';
""")
op.execute("""UPDATE assessments SET status = 'Open'""")
op.execute("""ALTER TABLE assessments
CHANGE status status
ENUM("Open","In Progress","Finished","Verified","Final")
DEFAULT "Open"
NOT NULL
""")
def downgrade():
pass
|
<commit_before><commit_msg>Add migration for assignable mixin<commit_after># Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate Assessment to Assignable mixin
Revision ID: 6bed0575a0b
Revises: 262bbe790f4c
Create Date: 2016-02-03 14:39:12.737518
"""
# Disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=C0103
from alembic import op
# revision identifiers, used by Alembic.
revision = '6bed0575a0b'
down_revision = '262bbe790f4c'
def upgrade():
op.execute("""
INSERT INTO relationships (
modified_by_id, created_at, updated_at,
source_id, source_type, destination_id, destination_type,
context_id
)
SELECT
oo.modified_by_id, NOW(), NOW(),
oo.ownable_id, oo.ownable_type, oo.person_id, 'Person',
oo.context_id
FROM object_owners AS oo
WHERE oo.ownable_type = 'Assessment'
""")
op.execute("""
INSERT INTO relationship_attrs (
relationship_id, attr_name, attr_value
)
SELECT r.id, 'AssigneeType', 'Creator,Assessor'
FROM relationships AS r
WHERE r.source_type = 'Assessment';
""")
op.execute("""UPDATE assessments SET status = 'Open'""")
op.execute("""ALTER TABLE assessments
CHANGE status status
ENUM("Open","In Progress","Finished","Verified","Final")
DEFAULT "Open"
NOT NULL
""")
def downgrade():
pass
|
|
2e173a0d8503a152eba1a99ee66a7a81127496a8
|
fluentcheck/tests/test_type_hierarchy.py
|
fluentcheck/tests/test_type_hierarchy.py
|
import unittest
from fluentcheck.check import Check, CheckError
class ParentA:
def __init__(self):
pass
class Child(ParentA):
def __init__(self):
pass
class GrandChild(Child):
def __init__(self):
pass
class ParentB:
def __init__(self):
pass
class ChildOfMultipleParents(ParentA, ParentB):
def __init__(self):
pass
class TestTypeHierarchy(unittest.TestCase):
def test_is_subtype_of(self):
res = Check(Child()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_grandchild_is_subtype_of_parent(self):
res = Check(GrandChild()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(GrandChild)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_multiple_inheritance(self):
res = Check(ChildOfMultipleParents()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of((ChildOfMultipleParents, ParentB))
self.fail()
except CheckError:
pass
def test_is_not_subtype_of(self):
res = Check(ParentA()).is_not_subtype_of(ParentB)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(ParentA)
self.fail()
except CheckError:
pass
def test_is_subtype_of_itself(self):
res = Check(Child()).is_subtype_of(Child)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_atleast_one_parent(self):
res = Check(Child()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of((ParentA, ParentB))
self.fail()
except CheckError:
pass
|
Add tests for type hierarchy
|
Add tests for type hierarchy
|
Python
|
mit
|
csparpa/check
|
Add tests for type hierarchy
|
import unittest
from fluentcheck.check import Check, CheckError
class ParentA:
def __init__(self):
pass
class Child(ParentA):
def __init__(self):
pass
class GrandChild(Child):
def __init__(self):
pass
class ParentB:
def __init__(self):
pass
class ChildOfMultipleParents(ParentA, ParentB):
def __init__(self):
pass
class TestTypeHierarchy(unittest.TestCase):
def test_is_subtype_of(self):
res = Check(Child()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_grandchild_is_subtype_of_parent(self):
res = Check(GrandChild()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(GrandChild)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_multiple_inheritance(self):
res = Check(ChildOfMultipleParents()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of((ChildOfMultipleParents, ParentB))
self.fail()
except CheckError:
pass
def test_is_not_subtype_of(self):
res = Check(ParentA()).is_not_subtype_of(ParentB)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(ParentA)
self.fail()
except CheckError:
pass
def test_is_subtype_of_itself(self):
res = Check(Child()).is_subtype_of(Child)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_atleast_one_parent(self):
res = Check(Child()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of((ParentA, ParentB))
self.fail()
except CheckError:
pass
|
<commit_before><commit_msg>Add tests for type hierarchy<commit_after>
|
import unittest
from fluentcheck.check import Check, CheckError
class ParentA:
def __init__(self):
pass
class Child(ParentA):
def __init__(self):
pass
class GrandChild(Child):
def __init__(self):
pass
class ParentB:
def __init__(self):
pass
class ChildOfMultipleParents(ParentA, ParentB):
def __init__(self):
pass
class TestTypeHierarchy(unittest.TestCase):
def test_is_subtype_of(self):
res = Check(Child()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_grandchild_is_subtype_of_parent(self):
res = Check(GrandChild()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(GrandChild)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_multiple_inheritance(self):
res = Check(ChildOfMultipleParents()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of((ChildOfMultipleParents, ParentB))
self.fail()
except CheckError:
pass
def test_is_not_subtype_of(self):
res = Check(ParentA()).is_not_subtype_of(ParentB)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(ParentA)
self.fail()
except CheckError:
pass
def test_is_subtype_of_itself(self):
res = Check(Child()).is_subtype_of(Child)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_atleast_one_parent(self):
res = Check(Child()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of((ParentA, ParentB))
self.fail()
except CheckError:
pass
|
Add tests for type hierarchyimport unittest
from fluentcheck.check import Check, CheckError
class ParentA:
def __init__(self):
pass
class Child(ParentA):
def __init__(self):
pass
class GrandChild(Child):
def __init__(self):
pass
class ParentB:
def __init__(self):
pass
class ChildOfMultipleParents(ParentA, ParentB):
def __init__(self):
pass
class TestTypeHierarchy(unittest.TestCase):
def test_is_subtype_of(self):
res = Check(Child()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_grandchild_is_subtype_of_parent(self):
res = Check(GrandChild()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(GrandChild)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_multiple_inheritance(self):
res = Check(ChildOfMultipleParents()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of((ChildOfMultipleParents, ParentB))
self.fail()
except CheckError:
pass
def test_is_not_subtype_of(self):
res = Check(ParentA()).is_not_subtype_of(ParentB)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(ParentA)
self.fail()
except CheckError:
pass
def test_is_subtype_of_itself(self):
res = Check(Child()).is_subtype_of(Child)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_atleast_one_parent(self):
res = Check(Child()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of((ParentA, ParentB))
self.fail()
except CheckError:
pass
|
<commit_before><commit_msg>Add tests for type hierarchy<commit_after>import unittest
from fluentcheck.check import Check, CheckError
class ParentA:
def __init__(self):
pass
class Child(ParentA):
def __init__(self):
pass
class GrandChild(Child):
def __init__(self):
pass
class ParentB:
def __init__(self):
pass
class ChildOfMultipleParents(ParentA, ParentB):
def __init__(self):
pass
class TestTypeHierarchy(unittest.TestCase):
def test_is_subtype_of(self):
res = Check(Child()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_grandchild_is_subtype_of_parent(self):
res = Check(GrandChild()).is_subtype_of(ParentA)
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of(GrandChild)
self.fail()
except CheckError:
pass
def test_is_subtype_of_when_multiple_inheritance(self):
res = Check(ChildOfMultipleParents()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(ParentA()).is_subtype_of((ChildOfMultipleParents, ParentB))
self.fail()
except CheckError:
pass
def test_is_not_subtype_of(self):
res = Check(ParentA()).is_not_subtype_of(ParentB)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(ParentA)
self.fail()
except CheckError:
pass
def test_is_subtype_of_itself(self):
res = Check(Child()).is_subtype_of(Child)
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of(Child)
self.fail()
except CheckError:
pass
def test_is_subtype_of_atleast_one_parent(self):
res = Check(Child()).is_subtype_of((ParentA, ParentB))
self.assertIsInstance(res, Check)
try:
Check(Child()).is_not_subtype_of((ParentA, ParentB))
self.fail()
except CheckError:
pass
|
|
6789fbc32400dd3b32a39881ffdacfd1a3729fa0
|
gmn/src/d1_gmn/tests/test_mgmt_import.py
|
gmn/src/d1_gmn/tests/test_mgmt_import.py
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the bulk importer management command
"""
from __future__ import absolute_import
import responses
import d1_gmn.tests.gmn_test_case
import d1_test.d1_test_case
import d1_test.mock_api.get as mock_get
import d1_test.mock_api.get_log_records as mock_log_records
import d1_test.mock_api.get_system_metadata as mock_get_system_metadata
import d1_test.mock_api.list_objects as mock_object_list
import django
import django.core.management
@d1_test.d1_test_case.reproducible_random_decorator('TestMgmtImport')
class TestMgmtImport(d1_gmn.tests.gmn_test_case.GMNTestCase):
@responses.activate
def test_1000(self):
mock_object_list.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_log_records.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_get_system_metadata.add_callback(
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
mock_get.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
with self.mock.disable_management_command_logging():
with d1_test.d1_test_case.capture_log() as log_stream:
with d1_test.d1_test_case.disable_debug_level_logging():
django.core.management.call_command(
'import', '--force', '--major=2',
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
log_str = log_stream.getvalue()
self.sample.assert_equals(log_str, 'import')
|
Add tests for bulk importer
|
Add tests for bulk importer
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
Add tests for bulk importer
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the bulk importer management command
"""
from __future__ import absolute_import
import responses
import d1_gmn.tests.gmn_test_case
import d1_test.d1_test_case
import d1_test.mock_api.get as mock_get
import d1_test.mock_api.get_log_records as mock_log_records
import d1_test.mock_api.get_system_metadata as mock_get_system_metadata
import d1_test.mock_api.list_objects as mock_object_list
import django
import django.core.management
@d1_test.d1_test_case.reproducible_random_decorator('TestMgmtImport')
class TestMgmtImport(d1_gmn.tests.gmn_test_case.GMNTestCase):
@responses.activate
def test_1000(self):
mock_object_list.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_log_records.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_get_system_metadata.add_callback(
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
mock_get.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
with self.mock.disable_management_command_logging():
with d1_test.d1_test_case.capture_log() as log_stream:
with d1_test.d1_test_case.disable_debug_level_logging():
django.core.management.call_command(
'import', '--force', '--major=2',
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
log_str = log_stream.getvalue()
self.sample.assert_equals(log_str, 'import')
|
<commit_before><commit_msg>Add tests for bulk importer<commit_after>
|
# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the bulk importer management command
"""
from __future__ import absolute_import
import responses
import d1_gmn.tests.gmn_test_case
import d1_test.d1_test_case
import d1_test.mock_api.get as mock_get
import d1_test.mock_api.get_log_records as mock_log_records
import d1_test.mock_api.get_system_metadata as mock_get_system_metadata
import d1_test.mock_api.list_objects as mock_object_list
import django
import django.core.management
@d1_test.d1_test_case.reproducible_random_decorator('TestMgmtImport')
class TestMgmtImport(d1_gmn.tests.gmn_test_case.GMNTestCase):
@responses.activate
def test_1000(self):
mock_object_list.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_log_records.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_get_system_metadata.add_callback(
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
mock_get.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
with self.mock.disable_management_command_logging():
with d1_test.d1_test_case.capture_log() as log_stream:
with d1_test.d1_test_case.disable_debug_level_logging():
django.core.management.call_command(
'import', '--force', '--major=2',
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
log_str = log_stream.getvalue()
self.sample.assert_equals(log_str, 'import')
|
Add tests for bulk importer# -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the bulk importer management command
"""
from __future__ import absolute_import
import responses
import d1_gmn.tests.gmn_test_case
import d1_test.d1_test_case
import d1_test.mock_api.get as mock_get
import d1_test.mock_api.get_log_records as mock_log_records
import d1_test.mock_api.get_system_metadata as mock_get_system_metadata
import d1_test.mock_api.list_objects as mock_object_list
import django
import django.core.management
@d1_test.d1_test_case.reproducible_random_decorator('TestMgmtImport')
class TestMgmtImport(d1_gmn.tests.gmn_test_case.GMNTestCase):
@responses.activate
def test_1000(self):
mock_object_list.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_log_records.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_get_system_metadata.add_callback(
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
mock_get.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
with self.mock.disable_management_command_logging():
with d1_test.d1_test_case.capture_log() as log_stream:
with d1_test.d1_test_case.disable_debug_level_logging():
django.core.management.call_command(
'import', '--force', '--major=2',
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
log_str = log_stream.getvalue()
self.sample.assert_equals(log_str, 'import')
|
<commit_before><commit_msg>Add tests for bulk importer<commit_after># -*- coding: utf-8 -*-
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2016 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the bulk importer management command
"""
from __future__ import absolute_import
import responses
import d1_gmn.tests.gmn_test_case
import d1_test.d1_test_case
import d1_test.mock_api.get as mock_get
import d1_test.mock_api.get_log_records as mock_log_records
import d1_test.mock_api.get_system_metadata as mock_get_system_metadata
import d1_test.mock_api.list_objects as mock_object_list
import django
import django.core.management
@d1_test.d1_test_case.reproducible_random_decorator('TestMgmtImport')
class TestMgmtImport(d1_gmn.tests.gmn_test_case.GMNTestCase):
@responses.activate
def test_1000(self):
mock_object_list.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_log_records.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
mock_get_system_metadata.add_callback(
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
mock_get.add_callback(d1_test.d1_test_case.MOCK_REMOTE_BASE_URL)
with self.mock.disable_management_command_logging():
with d1_test.d1_test_case.capture_log() as log_stream:
with d1_test.d1_test_case.disable_debug_level_logging():
django.core.management.call_command(
'import', '--force', '--major=2',
d1_test.d1_test_case.MOCK_REMOTE_BASE_URL
)
log_str = log_stream.getvalue()
self.sample.assert_equals(log_str, 'import')
|
|
2ad1c276a96a77d2088f996ebc32fa74206d1cef
|
osf/migrations/0036_ensure_schemas.py
|
osf/migrations/0036_ensure_schemas.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-24 19:33
from __future__ import unicode_literals
import logging
from django.db import migrations
from osf.models import MetaSchema
from website.project.metadata.schemas import OSF_META_SCHEMAS
logger = logging.getLogger(__file__)
def add_schemas(*args):
"""Import meta-data schemas from JSON to database if not already loaded
"""
schema_count = 0
for schema in OSF_META_SCHEMAS:
schema_obj, created = MetaSchema.objects.get_or_create(
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.schema = schema
schema_obj.save()
schema_count += 1
if created:
logger.info('Added schema {} to the database'.format(schema['name']))
logger.info('Ensured {} schemas are in the database'.format(schema_count))
def remove_schemas(*args):
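    # Reverse migration: delete the MetaSchema row matching each bundled schema.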
removed_schemas = 0
for schema in OSF_META_SCHEMAS:
schema_obj = MetaSchema.objects.get(
schema=schema,
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.delete()
removed_schemas += 1
logger.info('Removed {} schemas from the database'.format(removed_schemas))
class Migration(migrations.Migration):
dependencies = [
('osf', '0035_ensure_licenses'),
]
operations = [
migrations.RunPython(add_schemas, remove_schemas),
]
|
Add migration for ensure schemas
|
Add migration for ensure schemas
|
Python
|
apache-2.0
|
CenterForOpenScience/osf.io,mattclark/osf.io,pattisdr/osf.io,caseyrollins/osf.io,crcresearch/osf.io,adlius/osf.io,erinspace/osf.io,baylee-d/osf.io,cslzchen/osf.io,laurenrevere/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,binoculars/osf.io,mattclark/osf.io,pattisdr/osf.io,sloria/osf.io,brianjgeiger/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,adlius/osf.io,caneruguz/osf.io,felliott/osf.io,chennan47/osf.io,TomBaxter/osf.io,aaxelb/osf.io,caneruguz/osf.io,leb2dg/osf.io,mfraezz/osf.io,pattisdr/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,leb2dg/osf.io,chrisseto/osf.io,saradbowman/osf.io,cslzchen/osf.io,TomBaxter/osf.io,chennan47/osf.io,leb2dg/osf.io,binoculars/osf.io,caseyrollins/osf.io,icereval/osf.io,adlius/osf.io,aaxelb/osf.io,chrisseto/osf.io,erinspace/osf.io,cslzchen/osf.io,icereval/osf.io,mattclark/osf.io,binoculars/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,felliott/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,sloria/osf.io,laurenrevere/osf.io,adlius/osf.io,crcresearch/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,sloria/osf.io,erinspace/osf.io,icereval/osf.io,TomBaxter/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,felliott/osf.io,laurenrevere/osf.io,chrisseto/osf.io,crcresearch/osf.io
|
Add migration for ensure schemas
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-24 19:33
from __future__ import unicode_literals
import logging
from django.db import migrations
from osf.models import MetaSchema
from website.project.metadata.schemas import OSF_META_SCHEMAS
logger = logging.getLogger(__file__)
def add_schemas(*args):
"""Import meta-data schemas from JSON to database if not already loaded
"""
schema_count = 0
for schema in OSF_META_SCHEMAS:
schema_obj, created = MetaSchema.objects.get_or_create(
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.schema = schema
schema_obj.save()
schema_count += 1
if created:
logger.info('Added schema {} to the database'.format(schema['name']))
logger.info('Ensured {} schemas are in the database'.format(schema_count))
def remove_schemas(*args):
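    # Reverse migration: delete the MetaSchema row matching each bundled schema.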
removed_schemas = 0
for schema in OSF_META_SCHEMAS:
schema_obj = MetaSchema.objects.get(
schema=schema,
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.delete()
removed_schemas += 1
logger.info('Removed {} schemas from the database'.format(removed_schemas))
class Migration(migrations.Migration):
dependencies = [
('osf', '0035_ensure_licenses'),
]
operations = [
migrations.RunPython(add_schemas, remove_schemas),
]
|
<commit_before><commit_msg>Add migration for ensure schemas<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-24 19:33
from __future__ import unicode_literals
import logging
from django.db import migrations
from osf.models import MetaSchema
from website.project.metadata.schemas import OSF_META_SCHEMAS
logger = logging.getLogger(__file__)
def add_schemas(*args):
"""Import meta-data schemas from JSON to database if not already loaded
"""
schema_count = 0
for schema in OSF_META_SCHEMAS:
schema_obj, created = MetaSchema.objects.get_or_create(
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.schema = schema
schema_obj.save()
schema_count += 1
if created:
logger.info('Added schema {} to the database'.format(schema['name']))
logger.info('Ensured {} schemas are in the database'.format(schema_count))
def remove_schemas(*args):
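    # Reverse migration: delete the MetaSchema row matching each bundled schema.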
removed_schemas = 0
for schema in OSF_META_SCHEMAS:
schema_obj = MetaSchema.objects.get(
schema=schema,
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.delete()
removed_schemas += 1
logger.info('Removed {} schemas from the database'.format(removed_schemas))
class Migration(migrations.Migration):
dependencies = [
('osf', '0035_ensure_licenses'),
]
operations = [
migrations.RunPython(add_schemas, remove_schemas),
]
|
Add migration for ensure schemas# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-24 19:33
from __future__ import unicode_literals
import logging
from django.db import migrations
from osf.models import MetaSchema
from website.project.metadata.schemas import OSF_META_SCHEMAS
logger = logging.getLogger(__file__)
def add_schemas(*args):
"""Import meta-data schemas from JSON to database if not already loaded
"""
schema_count = 0
for schema in OSF_META_SCHEMAS:
schema_obj, created = MetaSchema.objects.get_or_create(
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.schema = schema
schema_obj.save()
schema_count += 1
if created:
logger.info('Added schema {} to the database'.format(schema['name']))
logger.info('Ensured {} schemas are in the database'.format(schema_count))
def remove_schemas(*args):
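    # Reverse migration: delete the MetaSchema row matching each bundled schema.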
removed_schemas = 0
for schema in OSF_META_SCHEMAS:
schema_obj = MetaSchema.objects.get(
schema=schema,
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.delete()
removed_schemas += 1
logger.info('Removed {} schemas from the database'.format(removed_schemas))
class Migration(migrations.Migration):
dependencies = [
('osf', '0035_ensure_licenses'),
]
operations = [
migrations.RunPython(add_schemas, remove_schemas),
]
|
<commit_before><commit_msg>Add migration for ensure schemas<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-24 19:33
from __future__ import unicode_literals
import logging
from django.db import migrations
from osf.models import MetaSchema
from website.project.metadata.schemas import OSF_META_SCHEMAS
logger = logging.getLogger(__file__)
def add_schemas(*args):
"""Import meta-data schemas from JSON to database if not already loaded
"""
schema_count = 0
for schema in OSF_META_SCHEMAS:
schema_obj, created = MetaSchema.objects.get_or_create(
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.schema = schema
schema_obj.save()
schema_count += 1
if created:
logger.info('Added schema {} to the database'.format(schema['name']))
logger.info('Ensured {} schemas are in the database'.format(schema_count))
def remove_schemas(*args):
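    # Reverse migration: delete the MetaSchema row matching each bundled schema.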
removed_schemas = 0
for schema in OSF_META_SCHEMAS:
schema_obj = MetaSchema.objects.get(
schema=schema,
name=schema['name'],
schema_version=schema.get('version', 1)
)
schema_obj.delete()
removed_schemas += 1
logger.info('Removed {} schemas from the database'.format(removed_schemas))
class Migration(migrations.Migration):
dependencies = [
('osf', '0035_ensure_licenses'),
]
operations = [
migrations.RunPython(add_schemas, remove_schemas),
]
|
|
ae7b39075f3dde712e50f09a388ccbf9e187eab9
|
saleor/product/migrations/0117_auto_20200423_0737.py
|
saleor/product/migrations/0117_auto_20200423_0737.py
|
# Generated by Django 3.0.5 on 2020-04-23 12:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0116_auto_20200225_0237'),
]
operations = [
migrations.AlterField(
model_name='producttranslation',
name='name',
field=models.CharField(max_length=250),
),
]
|
Add auto django migration for ProductTranslation alter name operation
|
Add auto django migration for ProductTranslation alter name operation
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor
|
Add auto django migration for ProductTranslation alter name operation
|
# Generated by Django 3.0.5 on 2020-04-23 12:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0116_auto_20200225_0237'),
]
operations = [
migrations.AlterField(
model_name='producttranslation',
name='name',
field=models.CharField(max_length=250),
),
]
|
<commit_before><commit_msg>Add auto django migration for ProductTranslation alter name operation<commit_after>
|
# Generated by Django 3.0.5 on 2020-04-23 12:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0116_auto_20200225_0237'),
]
operations = [
migrations.AlterField(
model_name='producttranslation',
name='name',
field=models.CharField(max_length=250),
),
]
|
Add auto django migration for ProductTranslation alter name operation# Generated by Django 3.0.5 on 2020-04-23 12:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0116_auto_20200225_0237'),
]
operations = [
migrations.AlterField(
model_name='producttranslation',
name='name',
field=models.CharField(max_length=250),
),
]
|
<commit_before><commit_msg>Add auto django migration for ProductTranslation alter name operation<commit_after># Generated by Django 3.0.5 on 2020-04-23 12:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0116_auto_20200225_0237'),
]
operations = [
migrations.AlterField(
model_name='producttranslation',
name='name',
field=models.CharField(max_length=250),
),
]
|
|
2f5bf2a03a8115240690af7fa43244ade5a285a5
|
plugins/models/__init__.py
|
plugins/models/__init__.py
|
# __init__.py
## This module is so that all plugins can have custom models.py files in
## their own folders and have them picked up with
## $ python manage.py syncdb
import inspect
import os
from django.conf import settings
from django.db import models
### Import all the model modules from plugins/ ###
__all__ = []
plugin_path = os.path.join(settings.BASE_DIR, 'plugins')
dirs = os.listdir(plugin_path)
for m in dirs:
pth = os.path.join(plugin_path, m)
model_pth = os.path.join(pth, "models.py")
if os.path.isdir(pth) and os.path.isfile(model_pth):
mod = 'plugins.'+m+'.models'
# print('importing module '+'plugins.'+m)
imp_mod = __import__(mod)
m1 = getattr(__import__(mod), m)
m2 = getattr(m1, "models")
for attr in dir(m2):
a1 = getattr(m2, attr)
            # Check that the class is derived from django.db.models.Model
            # but is not the base Model class itself
if inspect.isclass(a1) and \
a1 != models.Model and \
issubclass(a1, models.Model):
from_path = "plugins."+m+".models"
# print('from {} import {}'.format(from_path, a1.__name__))
globals()[a1.__name__] = a1
__all__.append(a1.__name__)
### ==== End plugin models modules Import === ###
|
Allow plugins to have models.py files in their own plugin folders
|
Allow plugins to have models.py files in their own plugin folders
|
Python
|
apache-2.0
|
kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2
|
Allow plugins to have models.py files in their own plugin folders
|
# __init__.py
## This module is so that all plugins can have custom models.py files in
## their own folders and have them picked up with
## $ python manage.py syncdb
import inspect
import os
from django.conf import settings
from django.db import models
### Import all the model modules from plugins/ ###
__all__ = []
plugin_path = os.path.join(settings.BASE_DIR, 'plugins')
dirs = os.listdir(plugin_path)
for m in dirs:
pth = os.path.join(plugin_path, m)
model_pth = os.path.join(pth, "models.py")
if os.path.isdir(pth) and os.path.isfile(model_pth):
mod = 'plugins.'+m+'.models'
# print('importing module '+'plugins.'+m)
imp_mod = __import__(mod)
m1 = getattr(__import__(mod), m)
m2 = getattr(m1, "models")
for attr in dir(m2):
a1 = getattr(m2, attr)
            # Check that the class is derived from django.db.models.Model
            # but is not the base Model class itself
if inspect.isclass(a1) and \
a1 != models.Model and \
issubclass(a1, models.Model):
from_path = "plugins."+m+".models"
# print('from {} import {}'.format(from_path, a1.__name__))
globals()[a1.__name__] = a1
__all__.append(a1.__name__)
### ==== End plugin models modules Import === ###
|
<commit_before><commit_msg>Allow plugins to have models.py files in their own plugin folders<commit_after>
|
# __init__.py
## This module is so that all plugins can have custom models.py files in
## their own folders and have them picked up with
## $ python manage.py syncdb
import inspect
import os
from django.conf import settings
from django.db import models
### Import all the model modules from plugins/ ###
__all__ = []
plugin_path = os.path.join(settings.BASE_DIR, 'plugins')
dirs = os.listdir(plugin_path)
for m in dirs:
pth = os.path.join(plugin_path, m)
model_pth = os.path.join(pth, "models.py")
if os.path.isdir(pth) and os.path.isfile(model_pth):
mod = 'plugins.'+m+'.models'
# print('importing module '+'plugins.'+m)
imp_mod = __import__(mod)
m1 = getattr(__import__(mod), m)
m2 = getattr(m1, "models")
for attr in dir(m2):
a1 = getattr(m2, attr)
            # Check that the class is derived from django.db.models.Model
            # but is not the base Model class itself
if inspect.isclass(a1) and \
a1 != models.Model and \
issubclass(a1, models.Model):
from_path = "plugins."+m+".models"
# print('from {} import {}'.format(from_path, a1.__name__))
globals()[a1.__name__] = a1
__all__.append(a1.__name__)
### ==== End plugin models modules Import === ###
|
Allow plugins to have models.py files in their own plugin folders# __init__.py
## This module is so that all plugins can have custom models.py files in
## their own folders and have them picked up with
## $ python manage.py syncdb
import inspect
import os
from django.conf import settings
from django.db import models
### Import all the model modules from plugins/ ###
__all__ = []
plugin_path = os.path.join(settings.BASE_DIR, 'plugins')
dirs = os.listdir(plugin_path)
for m in dirs:
pth = os.path.join(plugin_path, m)
model_pth = os.path.join(pth, "models.py")
if os.path.isdir(pth) and os.path.isfile(model_pth):
mod = 'plugins.'+m+'.models'
# print('importing module '+'plugins.'+m)
imp_mod = __import__(mod)
m1 = getattr(__import__(mod), m)
m2 = getattr(m1, "models")
for attr in dir(m2):
a1 = getattr(m2, attr)
            # Check that the class is derived from django.db.models.Model
            # but is not the base Model class itself
if inspect.isclass(a1) and \
a1 != models.Model and \
issubclass(a1, models.Model):
from_path = "plugins."+m+".models"
# print('from {} import {}'.format(from_path, a1.__name__))
globals()[a1.__name__] = a1
__all__.append(a1.__name__)
### ==== End plugin models modules Import === ###
|
<commit_before><commit_msg>Allow plugins to have models.py files in their own plugin folders<commit_after># __init__.py
## This module is so that all plugins can have custom models.py files in
## their own folders and have them picked up with
## $ python manage.py syncdb
import inspect
import os
from django.conf import settings
from django.db import models
### Import all the model modules from plugins/ ###
__all__ = []
plugin_path = os.path.join(settings.BASE_DIR, 'plugins')
dirs = os.listdir(plugin_path)
for m in dirs:
pth = os.path.join(plugin_path, m)
model_pth = os.path.join(pth, "models.py")
if os.path.isdir(pth) and os.path.isfile(model_pth):
mod = 'plugins.'+m+'.models'
# print('importing module '+'plugins.'+m)
imp_mod = __import__(mod)
m1 = getattr(__import__(mod), m)
m2 = getattr(m1, "models")
for attr in dir(m2):
a1 = getattr(m2, attr)
            # Check that the class is derived from django.db.models.Model
            # but is not the base Model class itself
if inspect.isclass(a1) and \
a1 != models.Model and \
issubclass(a1, models.Model):
from_path = "plugins."+m+".models"
# print('from {} import {}'.format(from_path, a1.__name__))
globals()[a1.__name__] = a1
__all__.append(a1.__name__)
### ==== End plugin models modules Import === ###
|
|
55c1aaa740724661d58f17b9be818e91a04fde9c
|
pythran/tests/cases/comp_unrolling.py
|
pythran/tests/cases/comp_unrolling.py
|
#pythran export list_comp(int list list)
#runas list_comp([[], [], [1]])
def foo(cc, x, y):
for a in cc:
if a:
return True
return False
def list_comp(cc):
return [(x,y) for x in range(1) for y in range(2) if foo(cc, x, y)]
|
Add new test case for unrolled list comprehension
|
Add new test case for unrolled list comprehension
|
Python
|
bsd-3-clause
|
pbrunet/pythran,serge-sans-paille/pythran,pombredanne/pythran,pombredanne/pythran,hainm/pythran,artas360/pythran,artas360/pythran,serge-sans-paille/pythran,hainm/pythran,pbrunet/pythran,artas360/pythran,hainm/pythran,pombredanne/pythran,pbrunet/pythran
|
Add new test case for unrolled list comprehension
|
#pythran export list_comp(int list list)
#runas list_comp([[], [], [1]])
def foo(cc, x, y):
for a in cc:
if a:
return True
return False
def list_comp(cc):
return [(x,y) for x in range(1) for y in range(2) if foo(cc, x, y)]
|
<commit_before><commit_msg>Add new test case for unrolled list comprehension<commit_after>
|
#pythran export list_comp(int list list)
#runas list_comp([[], [], [1]])
def foo(cc, x, y):
for a in cc:
if a:
return True
return False
def list_comp(cc):
return [(x,y) for x in range(1) for y in range(2) if foo(cc, x, y)]
|
Add new test case for unrolled list comprehension#pythran export list_comp(int list list)
#runas list_comp([[], [], [1]])
def foo(cc, x, y):
for a in cc:
if a:
return True
return False
def list_comp(cc):
return [(x,y) for x in range(1) for y in range(2) if foo(cc, x, y)]
|
<commit_before><commit_msg>Add new test case for unrolled list comprehension<commit_after>#pythran export list_comp(int list list)
#runas list_comp([[], [], [1]])
def foo(cc, x, y):
for a in cc:
if a:
return True
return False
def list_comp(cc):
return [(x,y) for x in range(1) for y in range(2) if foo(cc, x, y)]
|
|
29bc58359f2468ef488563abb4f2d1974e848b1c
|
py/second-minimum-node-in-a-binary-tree.py
|
py/second-minimum-node-in-a-binary-tree.py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def dfs(self, cur):
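        # Depth-first traversal tracking the smallest and second-smallest
        # distinct values seen so far.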
if cur:
if self.smallest is None:
self.smallest = cur.val
elif self.smallest > cur.val:
self.smallest, self.secondSmallest = cur.val, self.smallest
elif self.smallest < cur.val:
if self.secondSmallest is None:
self.secondSmallest = cur.val
elif self.secondSmallest > cur.val:
self.secondSmallest = cur.val
self.dfs(cur.left)
self.dfs(cur.right)
def findSecondMinimumValue(self, root):
"""
:type root: TreeNode
:rtype: int
"""
self.smallest = None
self.secondSmallest = None
self.dfs(root)
return -1 if self.secondSmallest is None else self.secondSmallest
|
Add py solution for 671. Second Minimum Node In a Binary Tree
|
Add py solution for 671. Second Minimum Node In a Binary Tree
671. Second Minimum Node In a Binary Tree: https://leetcode.com/problems/second-minimum-node-in-a-binary-tree/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 671. Second Minimum Node In a Binary Tree
671. Second Minimum Node In a Binary Tree: https://leetcode.com/problems/second-minimum-node-in-a-binary-tree/
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def dfs(self, cur):
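        # Depth-first traversal tracking the smallest and second-smallest
        # distinct values seen so far.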
if cur:
if self.smallest is None:
self.smallest = cur.val
elif self.smallest > cur.val:
self.smallest, self.secondSmallest = cur.val, self.smallest
elif self.smallest < cur.val:
if self.secondSmallest is None:
self.secondSmallest = cur.val
elif self.secondSmallest > cur.val:
self.secondSmallest = cur.val
self.dfs(cur.left)
self.dfs(cur.right)
def findSecondMinimumValue(self, root):
"""
:type root: TreeNode
:rtype: int
"""
self.smallest = None
self.secondSmallest = None
self.dfs(root)
return -1 if self.secondSmallest is None else self.secondSmallest
|
<commit_before><commit_msg>Add py solution for 671. Second Minimum Node In a Binary Tree
671. Second Minimum Node In a Binary Tree: https://leetcode.com/problems/second-minimum-node-in-a-binary-tree/<commit_after>
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def dfs(self, cur):
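        # Depth-first traversal tracking the smallest and second-smallest
        # distinct values seen so far.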
if cur:
if self.smallest is None:
self.smallest = cur.val
elif self.smallest > cur.val:
self.smallest, self.secondSmallest = cur.val, self.smallest
elif self.smallest < cur.val:
if self.secondSmallest is None:
self.secondSmallest = cur.val
elif self.secondSmallest > cur.val:
self.secondSmallest = cur.val
self.dfs(cur.left)
self.dfs(cur.right)
def findSecondMinimumValue(self, root):
"""
:type root: TreeNode
:rtype: int
"""
self.smallest = None
self.secondSmallest = None
self.dfs(root)
return -1 if self.secondSmallest is None else self.secondSmallest
|
Add py solution for 671. Second Minimum Node In a Binary Tree
671. Second Minimum Node In a Binary Tree: https://leetcode.com/problems/second-minimum-node-in-a-binary-tree/# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def dfs(self, cur):
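        # Depth-first traversal tracking the smallest and second-smallest
        # distinct values seen so far.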
if cur:
if self.smallest is None:
self.smallest = cur.val
elif self.smallest > cur.val:
self.smallest, self.secondSmallest = cur.val, self.smallest
elif self.smallest < cur.val:
if self.secondSmallest is None:
self.secondSmallest = cur.val
elif self.secondSmallest > cur.val:
self.secondSmallest = cur.val
self.dfs(cur.left)
self.dfs(cur.right)
def findSecondMinimumValue(self, root):
"""
:type root: TreeNode
:rtype: int
"""
self.smallest = None
self.secondSmallest = None
self.dfs(root)
return -1 if self.secondSmallest is None else self.secondSmallest
|
<commit_before><commit_msg>Add py solution for 671. Second Minimum Node In a Binary Tree
671. Second Minimum Node In a Binary Tree: https://leetcode.com/problems/second-minimum-node-in-a-binary-tree/<commit_after># Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def dfs(self, cur):
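        # Depth-first traversal tracking the smallest and second-smallest
        # distinct values seen so far.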
if cur:
if self.smallest is None:
self.smallest = cur.val
elif self.smallest > cur.val:
self.smallest, self.secondSmallest = cur.val, self.smallest
elif self.smallest < cur.val:
if self.secondSmallest is None:
self.secondSmallest = cur.val
elif self.secondSmallest > cur.val:
self.secondSmallest = cur.val
self.dfs(cur.left)
self.dfs(cur.right)
def findSecondMinimumValue(self, root):
"""
:type root: TreeNode
:rtype: int
"""
self.smallest = None
self.secondSmallest = None
self.dfs(root)
return -1 if self.secondSmallest is None else self.secondSmallest
|
|
b4db8160e18ceedb3d7946c180fa8963d9e9755d
|
test/compute/test_backward_compatibility.py
|
test/compute/test_backward_compatibility.py
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
class BackwardCompatibilityTests(unittest.TestCase):
def test_all_the_old_paths_works(self):
# Common
from libcloud.types import InvalidCredsError
from libcloud.base import Node, NodeImage, NodeSize, NodeLocation
from libcloud.types import NodeState
from libcloud.types import LibcloudError
from libcloud.base import Response
from libcloud.base import ConnectionKey, ConnectionUserAndKey
from libcloud.base import NodeAuthPassword
# Driver specific
from libcloud.drivers.brightbox import BrightboxNodeDriver
from libcloud.drivers.cloudsigma import CloudSigmaZrhNodeDriver
from libcloud.drivers.rimuhosting import RimuHostingNodeDriver
from libcloud.drivers.elastichosts import ElasticHostsBaseNodeDriver
from libcloud.drivers.gogrid import GoGridNodeDriver, GoGridIpAddress
from libcloud.drivers.linode import LinodeNodeDriver
from libcloud.drivers.vpsnet import VPSNetNodeDriver
from libcloud.drivers.opennebula import OpenNebulaNodeDriver
from libcloud.drivers.ibm_sbc import IBMNodeDriver as IBM
from libcloud.drivers.rackspace import RackspaceNodeDriver as Rackspace
from libcloud.drivers.ec2 import EC2NodeDriver, EC2APSENodeDriver
from libcloud.drivers.ec2 import EC2APNENodeDriver, IdempotentParamError
from libcloud.drivers.voxel import VoxelNodeDriver as Voxel
from libcloud.drivers.vcloud import TerremarkDriver
from libcloud.drivers.vcloud import VCloudNodeDriver
from libcloud.drivers.slicehost import SlicehostNodeDriver as Slicehost
from libcloud.drivers.softlayer import SoftLayerNodeDriver as SoftLayer
from libcloud.drivers.ecp import ECPNodeDriver
from libcloud.drivers.cloudsigma import str2dicts, str2list, dict2str
if __name__ == '__main__':
sys.exit(unittest.main())
|
Add some basic tests for backward compatibility.
|
Add some basic tests for backward compatibility.
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@1082028 13f79535-47bb-0310-9956-ffa450edef68
|
Python
|
apache-2.0
|
aleGpereira/libcloud,wido/libcloud,ClusterHQ/libcloud,cryptickp/libcloud,sfriesel/libcloud,Kami/libcloud,supertom/libcloud,ZuluPro/libcloud,carletes/libcloud,mistio/libcloud,techhat/libcloud,aleGpereira/libcloud,andrewsomething/libcloud,lochiiconnectivity/libcloud,ninefold/libcloud,Cloud-Elasticity-Services/as-libcloud,aviweit/libcloud,ByteInternet/libcloud,supertom/libcloud,Scalr/libcloud,andrewsomething/libcloud,cloudControl/libcloud,wuyuewen/libcloud,erjohnso/libcloud,iPlantCollaborativeOpenSource/libcloud,JamesGuthrie/libcloud,MrBasset/libcloud,erjohnso/libcloud,cloudControl/libcloud,Itxaka/libcloud,jerryblakley/libcloud,mbrukman/libcloud,NexusIS/libcloud,aviweit/libcloud,thesquelched/libcloud,smaffulli/libcloud,mistio/libcloud,wuyuewen/libcloud,jimbobhickville/libcloud,SecurityCompass/libcloud,pquentin/libcloud,mtekel/libcloud,marcinzaremba/libcloud,Jc2k/libcloud,jimbobhickville/libcloud,pantheon-systems/libcloud,thesquelched/libcloud,t-tran/libcloud,dcorbacho/libcloud,apache/libcloud,briancurtin/libcloud,marcinzaremba/libcloud,pquentin/libcloud,supertom/libcloud,StackPointCloud/libcloud,watermelo/libcloud,mathspace/libcloud,wrigri/libcloud,wido/libcloud,samuelchong/libcloud,t-tran/libcloud,andrewsomething/libcloud,watermelo/libcloud,dcorbacho/libcloud,aviweit/libcloud,Keisuke69/libcloud,illfelder/libcloud,munkiat/libcloud,pantheon-systems/libcloud,NexusIS/libcloud,ByteInternet/libcloud,jerryblakley/libcloud,mbrukman/libcloud,jimbobhickville/libcloud,watermelo/libcloud,DimensionDataCBUSydney/libcloud,wrigri/libcloud,atsaki/libcloud,pquentin/libcloud,apache/libcloud,StackPointCloud/libcloud,thesquelched/libcloud,lochiiconnectivity/libcloud,sergiorua/libcloud,samuelchong/libcloud,Verizon/libcloud,pantheon-systems/libcloud,carletes/libcloud,Cloud-Elasticity-Services/as-libcloud,cryptickp/libcloud,t-tran/libcloud,kater169/libcloud,schaubl/libcloud,illfelder/libcloud,wrigri/libcloud,aleGpereira/libcloud,munkiat/libcloud,jerryblakley/libcloud,schaubl/libcloud,erjohnso/libcloud,cryptickp/libcloud,sergiorua/libcloud,vongazman/libcloud,ClusterHQ/libcloud,iPlantCollaborativeOpenSource/libcloud,techhat/libcloud,Kami/libcloud,mbrukman/libcloud,JamesGuthrie/libcloud,kater169/libcloud,sahildua2305/libcloud,Keisuke69/libcloud,mathspace/libcloud,vongazman/libcloud,mistio/libcloud,kater169/libcloud,sergiorua/libcloud,Kami/libcloud,wuyuewen/libcloud,SecurityCompass/libcloud,Jc2k/libcloud,samuelchong/libcloud,sahildua2305/libcloud,mgogoulos/libcloud,sgammon/libcloud,curoverse/libcloud,Scalr/libcloud,mtekel/libcloud,mgogoulos/libcloud,smaffulli/libcloud,atsaki/libcloud,sfriesel/libcloud,marcinzaremba/libcloud,cloudControl/libcloud,lochiiconnectivity/libcloud,ZuluPro/libcloud,Scalr/libcloud,wido/libcloud,curoverse/libcloud,apache/libcloud,niteoweb/libcloud,NexusIS/libcloud,Cloud-Elasticity-Services/as-libcloud,iPlantCollaborativeOpenSource/libcloud,MrBasset/libcloud,ZuluPro/libcloud,curoverse/libcloud,SecurityCompass/libcloud,schaubl/libcloud,Itxaka/libcloud,JamesGuthrie/libcloud,StackPointCloud/libcloud,munkiat/libcloud,briancurtin/libcloud,Itxaka/libcloud,briancurtin/libcloud,techhat/libcloud,illfelder/libcloud,sfriesel/libcloud,Verizon/libcloud,DimensionDataCBUSydney/libcloud,mathspace/libcloud,Verizon/libcloud,ByteInternet/libcloud,atsaki/libcloud,mgogoulos/libcloud,niteoweb/libcloud,DimensionDataCBUSydney/libcloud,dcorbacho/libcloud,MrBasset/libcloud,sahildua2305/libcloud,smaffulli/libcloud,sgammon/libcloud,niteoweb/libcloud,mtekel/libcloud,carletes/libcloud,vongazman/libcloud,ninefold/libcloud
|
Add some basic tests for backward compatibility.
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@1082028 13f79535-47bb-0310-9956-ffa450edef68
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
class BackwardCompatibilityTests(unittest.TestCase):
def test_all_the_old_paths_works(self):
# Common
from libcloud.types import InvalidCredsError
from libcloud.base import Node, NodeImage, NodeSize, NodeLocation
from libcloud.types import NodeState
from libcloud.types import LibcloudError
from libcloud.base import Response
from libcloud.base import ConnectionKey, ConnectionUserAndKey
from libcloud.base import NodeAuthPassword
# Driver specific
from libcloud.drivers.brightbox import BrightboxNodeDriver
from libcloud.drivers.cloudsigma import CloudSigmaZrhNodeDriver
from libcloud.drivers.rimuhosting import RimuHostingNodeDriver
from libcloud.drivers.elastichosts import ElasticHostsBaseNodeDriver
from libcloud.drivers.gogrid import GoGridNodeDriver, GoGridIpAddress
from libcloud.drivers.linode import LinodeNodeDriver
from libcloud.drivers.vpsnet import VPSNetNodeDriver
from libcloud.drivers.opennebula import OpenNebulaNodeDriver
from libcloud.drivers.ibm_sbc import IBMNodeDriver as IBM
from libcloud.drivers.rackspace import RackspaceNodeDriver as Rackspace
from libcloud.drivers.ec2 import EC2NodeDriver, EC2APSENodeDriver
from libcloud.drivers.ec2 import EC2APNENodeDriver, IdempotentParamError
from libcloud.drivers.voxel import VoxelNodeDriver as Voxel
from libcloud.drivers.vcloud import TerremarkDriver
from libcloud.drivers.vcloud import VCloudNodeDriver
from libcloud.drivers.slicehost import SlicehostNodeDriver as Slicehost
from libcloud.drivers.softlayer import SoftLayerNodeDriver as SoftLayer
from libcloud.drivers.ecp import ECPNodeDriver
from libcloud.drivers.cloudsigma import str2dicts, str2list, dict2str
if __name__ == '__main__':
sys.exit(unittest.main())
|
<commit_before><commit_msg>Add some basic tests for backward compatibility.
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@1082028 13f79535-47bb-0310-9956-ffa450edef68<commit_after>
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
class BackwardCompatibilityTests(unittest.TestCase):
def test_all_the_old_paths_works(self):
# Common
from libcloud.types import InvalidCredsError
from libcloud.base import Node, NodeImage, NodeSize, NodeLocation
from libcloud.types import NodeState
from libcloud.types import LibcloudError
from libcloud.base import Response
from libcloud.base import ConnectionKey, ConnectionUserAndKey
from libcloud.base import NodeAuthPassword
# Driver specific
from libcloud.drivers.brightbox import BrightboxNodeDriver
from libcloud.drivers.cloudsigma import CloudSigmaZrhNodeDriver
from libcloud.drivers.rimuhosting import RimuHostingNodeDriver
from libcloud.drivers.elastichosts import ElasticHostsBaseNodeDriver
from libcloud.drivers.gogrid import GoGridNodeDriver, GoGridIpAddress
from libcloud.drivers.linode import LinodeNodeDriver
from libcloud.drivers.vpsnet import VPSNetNodeDriver
from libcloud.drivers.opennebula import OpenNebulaNodeDriver
from libcloud.drivers.ibm_sbc import IBMNodeDriver as IBM
from libcloud.drivers.rackspace import RackspaceNodeDriver as Rackspace
from libcloud.drivers.ec2 import EC2NodeDriver, EC2APSENodeDriver
from libcloud.drivers.ec2 import EC2APNENodeDriver, IdempotentParamError
from libcloud.drivers.voxel import VoxelNodeDriver as Voxel
from libcloud.drivers.vcloud import TerremarkDriver
from libcloud.drivers.vcloud import VCloudNodeDriver
from libcloud.drivers.slicehost import SlicehostNodeDriver as Slicehost
from libcloud.drivers.softlayer import SoftLayerNodeDriver as SoftLayer
from libcloud.drivers.ecp import ECPNodeDriver
from libcloud.drivers.cloudsigma import str2dicts, str2list, dict2str
if __name__ == '__main__':
sys.exit(unittest.main())
|
Add some basic tests for backward compatibility.
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@1082028 13f79535-47bb-0310-9956-ffa450edef68# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
class BackwardCompatibilityTests(unittest.TestCase):
def test_all_the_old_paths_works(self):
# Common
from libcloud.types import InvalidCredsError
from libcloud.base import Node, NodeImage, NodeSize, NodeLocation
from libcloud.types import NodeState
from libcloud.types import LibcloudError
from libcloud.base import Response
from libcloud.base import ConnectionKey, ConnectionUserAndKey
from libcloud.base import NodeAuthPassword
# Driver specific
from libcloud.drivers.brightbox import BrightboxNodeDriver
from libcloud.drivers.cloudsigma import CloudSigmaZrhNodeDriver
from libcloud.drivers.rimuhosting import RimuHostingNodeDriver
from libcloud.drivers.elastichosts import ElasticHostsBaseNodeDriver
from libcloud.drivers.gogrid import GoGridNodeDriver, GoGridIpAddress
from libcloud.drivers.linode import LinodeNodeDriver
from libcloud.drivers.vpsnet import VPSNetNodeDriver
from libcloud.drivers.opennebula import OpenNebulaNodeDriver
from libcloud.drivers.ibm_sbc import IBMNodeDriver as IBM
from libcloud.drivers.rackspace import RackspaceNodeDriver as Rackspace
from libcloud.drivers.ec2 import EC2NodeDriver, EC2APSENodeDriver
from libcloud.drivers.ec2 import EC2APNENodeDriver, IdempotentParamError
from libcloud.drivers.voxel import VoxelNodeDriver as Voxel
from libcloud.drivers.vcloud import TerremarkDriver
from libcloud.drivers.vcloud import VCloudNodeDriver
from libcloud.drivers.slicehost import SlicehostNodeDriver as Slicehost
from libcloud.drivers.softlayer import SoftLayerNodeDriver as SoftLayer
from libcloud.drivers.ecp import ECPNodeDriver
from libcloud.drivers.cloudsigma import str2dicts, str2list, dict2str
if __name__ == '__main__':
sys.exit(unittest.main())
|
<commit_before><commit_msg>Add some basic tests for backward compatibility.
git-svn-id: 353d90d4d8d13dcb4e0402680a9155a727f61a5a@1082028 13f79535-47bb-0310-9956-ffa450edef68<commit_after># Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
class BackwardCompatibilityTests(unittest.TestCase):
def test_all_the_old_paths_works(self):
# Common
from libcloud.types import InvalidCredsError
from libcloud.base import Node, NodeImage, NodeSize, NodeLocation
from libcloud.types import NodeState
from libcloud.types import LibcloudError
from libcloud.base import Response
from libcloud.base import ConnectionKey, ConnectionUserAndKey
from libcloud.base import NodeAuthPassword
# Driver specific
from libcloud.drivers.brightbox import BrightboxNodeDriver
from libcloud.drivers.cloudsigma import CloudSigmaZrhNodeDriver
from libcloud.drivers.rimuhosting import RimuHostingNodeDriver
from libcloud.drivers.elastichosts import ElasticHostsBaseNodeDriver
from libcloud.drivers.gogrid import GoGridNodeDriver, GoGridIpAddress
from libcloud.drivers.linode import LinodeNodeDriver
from libcloud.drivers.vpsnet import VPSNetNodeDriver
from libcloud.drivers.opennebula import OpenNebulaNodeDriver
from libcloud.drivers.ibm_sbc import IBMNodeDriver as IBM
from libcloud.drivers.rackspace import RackspaceNodeDriver as Rackspace
from libcloud.drivers.ec2 import EC2NodeDriver, EC2APSENodeDriver
from libcloud.drivers.ec2 import EC2APNENodeDriver, IdempotentParamError
from libcloud.drivers.voxel import VoxelNodeDriver as Voxel
from libcloud.drivers.vcloud import TerremarkDriver
from libcloud.drivers.vcloud import VCloudNodeDriver
from libcloud.drivers.slicehost import SlicehostNodeDriver as Slicehost
from libcloud.drivers.softlayer import SoftLayerNodeDriver as SoftLayer
from libcloud.drivers.ecp import ECPNodeDriver
from libcloud.drivers.cloudsigma import str2dicts, str2list, dict2str
if __name__ == '__main__':
sys.exit(unittest.main())
|
|
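A note on the pattern being tested above: keeping old dotted paths importable after a package reorganisation is usually done with small shim modules or sys.modules aliases. The sketch below shows the aliasing variant under assumed names; it is one common mechanism, not necessarily the one libcloud itself used.

# Hypothetical compatibility shim (names assumed for illustration).
# Registering the relocated module under its old dotted path in sys.modules
# makes both the old and new paths resolve to the same module object, so
# old `from libcloud.drivers.ec2 import ...` statements keep working.
import sys
import types

def alias_module(old_name: str, module: types.ModuleType) -> None:
    sys.modules[old_name] = module

# e.g., in the package's __init__ (assumed new location):
#   from libcloud.compute.drivers import ec2 as _ec2
#   alias_module("libcloud.drivers.ec2", _ec2)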
f6ef0ebbd71e684f803c0d5c0f1a3da0c65a13d6
|
migrations/versions/0158_remove_rate_limit_default.py
|
migrations/versions/0158_remove_rate_limit_default.py
|
"""
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
from alembic import op
import sqlalchemy as sa
revision = '0158_remove_rate_limit_default'
down_revision = '0157_add_rate_limit_to_service'
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
|
Remove database default on rate_limit column
|
Remove database default on rate_limit column
The default for the rate_limit column in the services and
services_history model is now set in the model, so we can remove the
default from the database.
Pivotal story: https://www.pivotaltracker.com/story/show/153992529
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Remove database default on rate_limit column
The default for the rate_limit column in the services and
services_history model is now set in the model, so we can remove the
default from the database.
Pivotal story: https://www.pivotaltracker.com/story/show/153992529
|
"""
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
from alembic import op
import sqlalchemy as sa
revision = '0158_remove_rate_limit_default'
down_revision = '0157_add_rate_limit_to_service'
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
|
<commit_before><commit_msg>Remove database default on rate_limit column
The default for the rate_limit column in the services and
services_history model is now set in the model, so we can remove the
default from the database.
Pivotal story: https://www.pivotaltracker.com/story/show/153992529<commit_after>
|
"""
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
from alembic import op
import sqlalchemy as sa
revision = '0158_remove_rate_limit_default'
down_revision = '0157_add_rate_limit_to_service'
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
|
Remove database default on rate_limit column
The default for the rate_limit column in the services and
services_history model is now set in the model, so we can remove the
default from the database.
Pivotal story: https://www.pivotaltracker.com/story/show/153992529"""
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
from alembic import op
import sqlalchemy as sa
revision = '0158_remove_rate_limit_default'
down_revision = '0157_add_rate_limit_to_service'
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
|
<commit_before><commit_msg>Remove database default on rate_limit column
The default for the rate_limit column in the services and
services_history model is now set in the model, so we can remove the
default from the database.
Pivotal story: https://www.pivotaltracker.com/story/show/153992529<commit_after>"""
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
from alembic import op
import sqlalchemy as sa
revision = '0158_remove_rate_limit_default'
down_revision = '0157_add_rate_limit_to_service'
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
|
|
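The message above says the default now lives in the model rather than the database. As a rough sketch of what that looks like on the ORM side (assumed code, not the actual notifications-api model), a SQLAlchemy column default is applied by the ORM at INSERT time instead of by a server-side DEFAULT clause:

# Sketch only: ORM-side default replacing the dropped database default.
from sqlalchemy import Column, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Service(Base):
    __tablename__ = "services"
    id = Column(Integer, primary_key=True)
    # default=3000 is filled in by SQLAlchemy when a row is inserted,
    # so no DEFAULT needs to exist in the table schema itself.
    rate_limit = Column(Integer, nullable=False, default=3000)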
422488c1bd9f54f6899ca51806b6b072ba45d18b
|
mediacloud/mediawords/db/export/test_export_tables.py
|
mediacloud/mediawords/db/export/test_export_tables.py
|
from io import StringIO
from mediawords.db import connect_to_db
from mediawords.db.export.export_tables import *
def test_export_tables_to_backup_crawler():
# Basic sanity test to make sure something gets printed out to STDOUT
# FIXME it would be better to try importing the resulting dump somewhere
db = connect_to_db()
orig_stdout = sys.stdout
sys.stdout = captured_stdout = StringIO()
export_dump_exception = None
try:
export_tables_to_backup_crawler(db=db)
except Exception as ex:
export_dump_exception = str(ex)
sys.stdout = orig_stdout
assert export_dump_exception is None
sql_dump = captured_stdout.getvalue()
assert 'COPY media' in sql_dump
|
Add basic sanity test for export_tables_to_backup_crawler()
|
Add basic sanity test for export_tables_to_backup_crawler()
|
Python
|
agpl-3.0
|
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
|
Add basic sanity test for export_tables_to_backup_crawler()
|
from io import StringIO
from mediawords.db import connect_to_db
from mediawords.db.export.export_tables import *
def test_export_tables_to_backup_crawler():
# Basic sanity test to make sure something gets printed out to STDOUT
# FIXME it would be better to try importing the resulting dump somewhere
db = connect_to_db()
orig_stdout = sys.stdout
sys.stdout = captured_stdout = StringIO()
export_dump_exception = None
try:
export_tables_to_backup_crawler(db=db)
except Exception as ex:
export_dump_exception = str(ex)
sys.stdout = orig_stdout
assert export_dump_exception is None
sql_dump = captured_stdout.getvalue()
assert 'COPY media' in sql_dump
|
<commit_before><commit_msg>Add basic sanity test for export_tables_to_backup_crawler()<commit_after>
|
from io import StringIO
from mediawords.db import connect_to_db
from mediawords.db.export.export_tables import *
def test_export_tables_to_backup_crawler():
# Basic sanity test to make sure something gets printed out to STDOUT
# FIXME it would be better to try importing the resulting dump somewhere
db = connect_to_db()
orig_stdout = sys.stdout
sys.stdout = captured_stdout = StringIO()
export_dump_exception = None
try:
export_tables_to_backup_crawler(db=db)
except Exception as ex:
export_dump_exception = str(ex)
sys.stdout = orig_stdout
assert export_dump_exception is None
sql_dump = captured_stdout.getvalue()
assert 'COPY media' in sql_dump
|
Add basic sanity test for export_tables_to_backup_crawler()from io import StringIO
from mediawords.db import connect_to_db
from mediawords.db.export.export_tables import *
def test_export_tables_to_backup_crawler():
# Basic sanity test to make sure something gets printed out to STDOUT
# FIXME it would be better to try importing the resulting dump somewhere
db = connect_to_db()
orig_stdout = sys.stdout
sys.stdout = captured_stdout = StringIO()
export_dump_exception = None
try:
export_tables_to_backup_crawler(db=db)
except Exception as ex:
export_dump_exception = str(ex)
sys.stdout = orig_stdout
assert export_dump_exception is None
sql_dump = captured_stdout.getvalue()
assert 'COPY media' in sql_dump
|
<commit_before><commit_msg>Add basic sanity test for export_tables_to_backup_crawler()<commit_after>from io import StringIO
from mediawords.db import connect_to_db
from mediawords.db.export.export_tables import *
def test_export_tables_to_backup_crawler():
# Basic sanity test to make sure something gets printed out to STDOUT
# FIXME it would be better to try importing the resulting dump somewhere
db = connect_to_db()
orig_stdout = sys.stdout
sys.stdout = captured_stdout = StringIO()
export_dump_exception = None
try:
export_tables_to_backup_crawler(db=db)
except Exception as ex:
export_dump_exception = str(ex)
sys.stdout = orig_stdout
assert export_dump_exception is None
sql_dump = captured_stdout.getvalue()
assert 'COPY media' in sql_dump
|
|
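The test above captures output by assigning to sys.stdout directly (note it relies on sys being pulled in via the star import). A more idiomatic Python 3 capture uses contextlib.redirect_stdout, which restores the stream even when the wrapped call raises; the printing function below is a stand-in for export_tables_to_backup_crawler:

# Sketch: stdout capture with contextlib.redirect_stdout.
import contextlib
from io import StringIO

def dump_tables() -> None:            # stand-in for the real export function
    print("COPY media FROM stdin;")

captured = StringIO()
with contextlib.redirect_stdout(captured):
    dump_tables()
assert "COPY media" in captured.getvalue()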
5484e1acb69d8a7238bbfa1056551867f9d52525
|
webtool/server/migrations/0013_auto_20171111_0435.py
|
webtool/server/migrations/0013_auto_20171111_0435.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-11 03:35
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0012_auto_20171109_0556'),
]
operations = [
migrations.AddField(
model_name='reference',
name='prefix',
field=models.PositiveSmallIntegerField(default=8, validators=[django.core.validators.MaxValueValidator(9, 'Bitte keine Zahlen größer 9 eingeben')], verbose_name='Jahreszahl'),
preserve_default=False,
)
]
|
Improve category filter for activities
|
Improve category filter for activities
|
Python
|
bsd-2-clause
|
wodo/WebTool3,wodo/WebTool3,wodo/WebTool3,wodo/WebTool3
|
Improve category filter for activities
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-11 03:35
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0012_auto_20171109_0556'),
]
operations = [
migrations.AddField(
model_name='reference',
name='prefix',
field=models.PositiveSmallIntegerField(default=8, validators=[django.core.validators.MaxValueValidator(9, 'Bitte keine Zahlen größer 9 eingeben')], verbose_name='Jahreszahl'),
preserve_default=False,
)
]
|
<commit_before><commit_msg>Improve category filter for activities<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-11 03:35
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0012_auto_20171109_0556'),
]
operations = [
migrations.AddField(
model_name='reference',
name='prefix',
field=models.PositiveSmallIntegerField(default=8, validators=[django.core.validators.MaxValueValidator(9, 'Bitte keine Zahlen größer 9 eingeben')], verbose_name='Jahreszahl'),
preserve_default=False,
)
]
|
Improve category filter for activities# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-11 03:35
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0012_auto_20171109_0556'),
]
operations = [
migrations.AddField(
model_name='reference',
name='prefix',
field=models.PositiveSmallIntegerField(default=8, validators=[django.core.validators.MaxValueValidator(9, 'Bitte keine Zahlen größer 9 eingeben')], verbose_name='Jahreszahl'),
preserve_default=False,
)
]
|
<commit_before><commit_msg>Improve category filter for activities<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-11 03:35
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('server', '0012_auto_20171109_0556'),
]
operations = [
migrations.AddField(
model_name='reference',
name='prefix',
field=models.PositiveSmallIntegerField(default=8, validators=[django.core.validators.MaxValueValidator(9, 'Bitte keine Zahlen größer 9 eingeben')], verbose_name='Jahreszahl'),
preserve_default=False,
)
]
|
|
3a28809b5a642d14d7174c8a309b130822d30e72
|
skan/test/test_csr.py
|
skan/test/test_csr.py
|
import os, sys
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from skan import csr
rundir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(rundir)
from skan._testdata import tinycycle, skeleton1
def test_tiny_cycle():
g, idxs, degimg = csr.skeleton_to_csgraph(tinycycle)
expected_indptr = [0, 0, 2, 4, 6, 8]
expected_indices = [2, 3, 1, 4, 1, 4, 2, 3]
expected_data = np.sqrt(2)
assert_equal(g.indptr, expected_indptr)
assert_equal(g.indices, expected_indices)
assert_almost_equal(g.data, expected_data)
expected_degrees = np.array([[0, 2, 0], [2, 0, 2], [0, 2, 0]])
assert_equal(degimg, expected_degrees)
assert_equal(idxs, [0, 1, 3, 5, 7])
def test_skeleton1_stats():
args = csr.skeleton_to_csgraph(skeleton1)
stats = csr.branch_statistics(*args)
|
Add incomplete test file for CSR
|
Add incomplete test file for CSR
|
Python
|
bsd-3-clause
|
jni/skan
|
Add incomplete test file for CSR
|
import os, sys
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from skan import csr
rundir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(rundir)
from skan._testdata import tinycycle, skeleton1
def test_tiny_cycle():
g, idxs, degimg = csr.skeleton_to_csgraph(tinycycle)
expected_indptr = [0, 0, 2, 4, 6, 8]
expected_indices = [2, 3, 1, 4, 1, 4, 2, 3]
expected_data = np.sqrt(2)
assert_equal(g.indptr, expected_indptr)
assert_equal(g.indices, expected_indices)
assert_almost_equal(g.data, expected_data)
expected_degrees = np.array([[0, 2, 0], [2, 0, 2], [0, 2, 0]])
assert_equal(degimg, expected_degrees)
assert_equal(idxs, [0, 1, 3, 5, 7])
def test_skeleton1_stats():
args = csr.skeleton_to_csgraph(skeleton1)
stats = csr.branch_statistics(*args)
|
<commit_before><commit_msg>Add incomplete test file for CSR<commit_after>
|
import os, sys
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from skan import csr
rundir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(rundir)
from skan._testdata import tinycycle, skeleton1
def test_tiny_cycle():
g, idxs, degimg = csr.skeleton_to_csgraph(tinycycle)
expected_indptr = [0, 0, 2, 4, 6, 8]
expected_indices = [2, 3, 1, 4, 1, 4, 2, 3]
expected_data = np.sqrt(2)
assert_equal(g.indptr, expected_indptr)
assert_equal(g.indices, expected_indices)
assert_almost_equal(g.data, expected_data)
expected_degrees = np.array([[0, 2, 0], [2, 0, 2], [0, 2, 0]])
assert_equal(degimg, expected_degrees)
assert_equal(idxs, [0, 1, 3, 5, 7])
def test_skeleton1_stats():
args = csr.skeleton_to_csgraph(skeleton1)
stats = csr.branch_statistics(*args)
|
Add incomplete test file for CSRimport os, sys
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from skan import csr
rundir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(rundir)
from skan._testdata import tinycycle, skeleton1
def test_tiny_cycle():
g, idxs, degimg = csr.skeleton_to_csgraph(tinycycle)
expected_indptr = [0, 0, 2, 4, 6, 8]
expected_indices = [2, 3, 1, 4, 1, 4, 2, 3]
expected_data = np.sqrt(2)
assert_equal(g.indptr, expected_indptr)
assert_equal(g.indices, expected_indices)
assert_almost_equal(g.data, expected_data)
expected_degrees = np.array([[0, 2, 0], [2, 0, 2], [0, 2, 0]])
assert_equal(degimg, expected_degrees)
assert_equal(idxs, [0, 1, 3, 5, 7])
def test_skeleton1_stats():
args = csr.skeleton_to_csgraph(skeleton1)
stats = csr.branch_statistics(*args)
|
<commit_before><commit_msg>Add incomplete test file for CSR<commit_after>import os, sys
import numpy as np
from numpy.testing import assert_equal, assert_almost_equal
from skan import csr
rundir = os.path.dirname(os.path.abspath(__file__))
sys.path.append(rundir)
from skan._testdata import tinycycle, skeleton1
def test_tiny_cycle():
g, idxs, degimg = csr.skeleton_to_csgraph(tinycycle)
expected_indptr = [0, 0, 2, 4, 6, 8]
expected_indices = [2, 3, 1, 4, 1, 4, 2, 3]
expected_data = np.sqrt(2)
assert_equal(g.indptr, expected_indptr)
assert_equal(g.indices, expected_indices)
assert_almost_equal(g.data, expected_data)
expected_degrees = np.array([[0, 2, 0], [2, 0, 2], [0, 2, 0]])
assert_equal(degimg, expected_degrees)
assert_equal(idxs, [0, 1, 3, 5, 7])
def test_skeleton1_stats():
args = csr.skeleton_to_csgraph(skeleton1)
stats = csr.branch_statistics(*args)
|
|
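For reference, the indptr/indices arrays asserted in test_tiny_cycle follow SciPy's CSR convention: row i's neighbours sit in indices[indptr[i]:indptr[i+1]]. Rebuilding the same matrix directly makes the expected layout easy to check by hand (node 0 is the empty background row, and nodes 1-4 form a cycle of sqrt(2)-length diagonal edges):

# Sketch: the adjacency structure the test expects, built with SciPy.
import numpy as np
from scipy.sparse import csr_matrix

indptr = np.array([0, 0, 2, 4, 6, 8])
indices = np.array([2, 3, 1, 4, 1, 4, 2, 3])
data = np.full(8, np.sqrt(2))

g = csr_matrix((data, indices, indptr), shape=(5, 5))
assert g[0].nnz == 0                      # background node: no edges
assert g[1, 2] == g[1, 3] == np.sqrt(2)   # node 1 touches nodes 2 and 3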
087d33b85381c0096be583bbae67ab0030a90f4f
|
python/tkinter/python3/keyboard_events.py
|
python/tkinter/python3/keyboard_events.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: https://github.com/jeremiedecock/pyarm/blob/master/pyarm/gui/tkinter_gui.py
import tkinter as tk
def keypress_callback(event):
print("keypress:", event.char)
def keyrelease_callback(event):
print("keyrelease:", event.char)
def main():
"""Main function"""
root = tk.Tk()
label = tk.Label(root, text="Press some keys")
label.pack()
# SETUP KEYBOARD EVENT CALLBACKS
root.bind("<KeyPress>", keypress_callback)
root.bind("<KeyRelease>", keyrelease_callback)
root.mainloop()
if __name__ == '__main__':
main()
|
Add a snippet (Python physics).
|
Add a snippet (Python physics).
|
Python
|
mit
|
jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets
|
Add a snippet (Python physics).
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: https://github.com/jeremiedecock/pyarm/blob/master/pyarm/gui/tkinter_gui.py
import tkinter as tk
def keypress_callback(event):
print("keypress:", event.char)
def keyrelease_callback(event):
print("keyrelease:", event.char)
def main():
"""Main function"""
root = tk.Tk()
label = tk.Label(root, text="Press some keys")
label.pack()
# SETUP KEYBOARD EVENT CALLBACKS
root.bind("<KeyPress>", keypress_callback)
root.bind("<KeyRelease>", keyrelease_callback)
root.mainloop()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a snippet (Python physics).<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: https://github.com/jeremiedecock/pyarm/blob/master/pyarm/gui/tkinter_gui.py
import tkinter as tk
def keypress_callback(event):
print("keypress:", event.char)
def keyrelease_callback(event):
print("keyrelease:", event.char)
def main():
"""Main function"""
root = tk.Tk()
label = tk.Label(root, text="Press some keys")
label.pack()
# SETUP KEYBOARD EVENT CALLBACKS
root.bind("<KeyPress>", keypress_callback)
root.bind("<KeyRelease>", keyrelease_callback)
root.mainloop()
if __name__ == '__main__':
main()
|
Add a snippet (Python physics).#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: https://github.com/jeremiedecock/pyarm/blob/master/pyarm/gui/tkinter_gui.py
import tkinter as tk
def keypress_callback(event):
print("keypress:", event.char)
def keyrelease_callback(event):
print("keyrelease:", event.char)
def main():
"""Main function"""
root = tk.Tk()
label = tk.Label(root, text="Press some keys")
label.pack()
# SETUP KEYBOARD EVENT CALLBACKS
root.bind("<KeyPress>", keypress_callback)
root.bind("<KeyRelease>", keyrelease_callback)
root.mainloop()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a snippet (Python physics).<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: https://github.com/jeremiedecock/pyarm/blob/master/pyarm/gui/tkinter_gui.py
import tkinter as tk
def keypress_callback(event):
print("keypress:", event.char)
def keyrelease_callback(event):
print("keyrelease:", event.char)
def main():
"""Main function"""
root = tk.Tk()
label = tk.Label(root, text="Press some keys")
label.pack()
# SETUP KEYBOARD EVENT CALLBACKS
root.bind("<KeyPress>", keypress_callback)
root.bind("<KeyRelease>", keyrelease_callback)
root.mainloop()
if __name__ == '__main__':
main()
|
|
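One caveat about the snippet above: event.char is an empty string for non-printable keys (arrows, modifiers, function keys), so handlers that need those usually fall back to event.keysym. A minimal variant of the callback:

# Sketch: report keysym when char is empty (e.g. "Left", "Shift_L", "F1").
def keypress_callback(event):
    print("keypress:", event.char or event.keysym)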
493f6ccfe05bd2b06c21f2a70c552a79be2a3f67
|
py/guess-number-higher-or-lower.py
|
py/guess-number-higher-or-lower.py
|
# The guess API is already defined for you.
# @param num, your guess
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
# def guess(num):
class Solution(object):
def guessNumber(self, n):
"""
:type n: int
:rtype: int
"""
L, U = 0, n + 1
while L + 1 < U:
mid = L + (U - L) / 2
g = guess(mid)
if g > 0:
L = mid
elif g < 0:
U = mid
else:
return mid
|
Add py solution for 374. Guess Number Higher or Lower
|
Add py solution for 374. Guess Number Higher or Lower
374. Guess Number Higher or Lower: https://leetcode.com/problems/guess-number-higher-or-lower/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 374. Guess Number Higher or Lower
374. Guess Number Higher or Lower: https://leetcode.com/problems/guess-number-higher-or-lower/
|
# The guess API is already defined for you.
# @param num, your guess
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
# def guess(num):
class Solution(object):
def guessNumber(self, n):
"""
:type n: int
:rtype: int
"""
L, U = 0, n + 1
while L + 1 < U:
mid = L + (U - L) / 2
g = guess(mid)
if g > 0:
L = mid
elif g < 0:
U = mid
else:
return mid
|
<commit_before><commit_msg>Add py solution for 374. Guess Number Higher or Lower
374. Guess Number Higher or Lower: https://leetcode.com/problems/guess-number-higher-or-lower/<commit_after>
|
# The guess API is already defined for you.
# @param num, your guess
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
# def guess(num):
class Solution(object):
def guessNumber(self, n):
"""
:type n: int
:rtype: int
"""
L, U = 0, n + 1
while L + 1 < U:
mid = L + (U - L) / 2
g = guess(mid)
if g > 0:
L = mid
elif g < 0:
U = mid
else:
return mid
|
Add py solution for 374. Guess Number Higher or Lower
374. Guess Number Higher or Lower: https://leetcode.com/problems/guess-number-higher-or-lower/# The guess API is already defined for you.
# @param num, your guess
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
# def guess(num):
class Solution(object):
def guessNumber(self, n):
"""
:type n: int
:rtype: int
"""
L, U = 0, n + 1
while L + 1 < U:
mid = L + (U - L) / 2
g = guess(mid)
if g > 0:
L = mid
elif g < 0:
U = mid
else:
return mid
|
<commit_before><commit_msg>Add py solution for 374. Guess Number Higher or Lower
374. Guess Number Higher or Lower: https://leetcode.com/problems/guess-number-higher-or-lower/<commit_after># The guess API is already defined for you.
# @param num, your guess
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
# def guess(num):
class Solution(object):
def guessNumber(self, n):
"""
:type n: int
:rtype: int
"""
L, U = 0, n + 1
while L + 1 < U:
mid = L + (U - L) / 2
g = guess(mid)
if g > 0:
L = mid
elif g < 0:
U = mid
else:
return mid
|
|
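The search above maintains the invariant that the hidden number always lies strictly between L and U, so each probe discards half the interval and the loop must return from inside. One portability note: (U - L) / 2 floors only under Python 2; on Python 3 it produces a float, so a port should use //. A runnable Python 3 sketch with a stub guess() (the stub is an assumption; the judge supplies the real API):

# Sketch: Python 3 version with integer floor division and a local stub.
SECRET = 6

def guess(num: int) -> int:                  # stand-in for the judge's API
    return (SECRET > num) - (SECRET < num)   # 1: higher, -1: lower, 0: hit

def guess_number(n: int) -> int:
    L, U = 0, n + 1                          # secret always in (L, U)
    while L + 1 < U:
        mid = L + (U - L) // 2               # // keeps mid an int on Python 3
        g = guess(mid)
        if g > 0:
            L = mid
        elif g < 0:
            U = mid
        else:
            return mid
    raise AssertionError("unreachable when 1 <= secret <= n")

assert guess_number(10) == 6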
89fa1ce2fbf16a65d78a8d7bf0f0600240500630
|
html/cli-html-tag-stripper.py
|
html/cli-html-tag-stripper.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
from HTMLParser import HTMLParser
# HTML Tag Stripper class
class HTMLTagStripper(HTMLParser):
def __init__(self):
self.reset()
self.data = []
self.isScriptTag = False
self.isStyleTag = False
def handle_starttag(self, tag, attr):
# <script> tag flag
if tag == 'script':
self.isScriptTag = True
elif tag == 'style':
self.isStyleTag = True
def handle_endtag(self, tag):
# </script> (end) tag flag
if tag == 'script':
self.isScriptTag = False
elif tag == 'style':
self.isStyleTag = False
def handle_data(self, data):
# as long as it's not a <script> and <style> tag, store the data value
if not self.isScriptTag and not self.isStyleTag:
# but wait there's more! ...as long as it's not a line feed or tab
if data and data != "\n" and data != "\t":
# store/append the data and while we're at it, strip whitespaces, tags, line feed, and carriage return
self.data.append(data.strip(' \t\n\r'))
def get_data(self):
# filter out null/None nodes
resp = filter(None, self.data)
return ' '.join(resp)
# open the url/page
response = urllib2.urlopen("https://www.python.org/")
html = response.read()
# remove HTML tags
hts = HTMLTagStripper()
hts.feed(html)
# get the data and cast it as string
data = str(hts.get_data())
# print it out!
print(data)
|
Add HTML tag stripper/remover (also remove <script> and <style> contents)
|
Add HTML tag stripper/remover (also remove <script> and <style> contents)
|
Python
|
mit
|
rawswift/python-collections
|
Add HTML tag stripper/remover (also remove <script> and <style> contents)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
from HTMLParser import HTMLParser
# HTML Tag Stripper class
class HTMLTagStripper(HTMLParser):
def __init__(self):
self.reset()
self.data = []
self.isScriptTag = False
self.isStyleTag = False
def handle_starttag(self, tag, attr):
# <script> tag flag
if tag == 'script':
self.isScriptTag = True
elif tag == 'style':
self.isStyleTag = True
def handle_endtag(self, tag):
# </script> (end) tag flag
if tag == 'script':
self.isScriptTag = False
elif tag == 'style':
self.isStyleTag = False
def handle_data(self, data):
# as long as it's not a <script> and <style> tag, store the data value
if not self.isScriptTag and not self.isStyleTag:
# but wait there's more! ...as long as it's not a line feed or tab
if data and data != "\n" and data != "\t":
# store/append the data and while we're at it, strip whitespaces, tags, line feed, and carriage return
self.data.append(data.strip(' \t\n\r'))
def get_data(self):
# filter out null/None nodes
resp = filter(None, self.data)
return ' '.join(resp)
# open the url/page
response = urllib2.urlopen("https://www.python.org/")
html = response.read()
# remove HTML tags
hts = HTMLTagStripper()
hts.feed(html)
# get the data and cast it as string
data = str(hts.get_data())
# print it out!
print(data)
|
<commit_before><commit_msg>Add HTML tag stripper/remover (also remove <script> and <style> contents)<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
from HTMLParser import HTMLParser
# HTML Tag Stripper class
class HTMLTagStripper(HTMLParser):
def __init__(self):
self.reset()
self.data = []
self.isScriptTag = False
self.isStyleTag = False
def handle_starttag(self, tag, attr):
# <script> tag flag
if tag == 'script':
self.isScriptTag = True
elif tag == 'style':
self.isStyleTag = True
def handle_endtag(self, tag):
# </script> (end) tag flag
if tag == 'script':
self.isScriptTag = False
elif tag == 'style':
self.isStyleTag = False
def handle_data(self, data):
# as long as it's not a <script> and <style> tag, store the data value
if not self.isScriptTag and not self.isStyleTag:
# but wait there's more! ...as long as it's not a line feed or tab
if data and data != "\n" and data != "\t":
# store/append the data and while we're at it, strip whitespaces, tags, line feed, and carriage return
self.data.append(data.strip(' \t\n\r'))
def get_data(self):
# filter out null/None nodes
resp = filter(None, self.data)
return ' '.join(resp)
# open the url/page
response = urllib2.urlopen("https://www.python.org/")
html = response.read()
# remove HTML tags
hts = HTMLTagStripper()
hts.feed(html)
# get the data and cast it as string
data = str(hts.get_data())
# print it out!
print(data)
|
Add HTML tag stripper/remover (also remove <script> and <style> contents)#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
from HTMLParser import HTMLParser
# HTML Tag Stripper class
class HTMLTagStripper(HTMLParser):
def __init__(self):
self.reset()
self.data = []
self.isScriptTag = False
self.isStyleTag = False
def handle_starttag(self, tag, attr):
# <script> tag flag
if tag == 'script':
self.isScriptTag = True
elif tag == 'style':
self.isStyleTag = True
def handle_endtag(self, tag):
# </script> (end) tag flag
if tag == 'script':
self.isScriptTag = False
elif tag == 'style':
self.isStyleTag = False
def handle_data(self, data):
# as long as it's not a <script> and <style> tag, store the data value
if not self.isScriptTag and not self.isStyleTag:
# but wait there's more! ...as long as it's not a line feed or tab
if data and data != "\n" and data != "\t":
# store/append the data and while we're at it, strip whitespaces, tags, line feed, and carriage return
self.data.append(data.strip(' \t\n\r'))
def get_data(self):
# filter out null/None nodes
resp = filter(None, self.data)
return ' '.join(resp)
# open the url/page
response = urllib2.urlopen("https://www.python.org/")
html = response.read()
# remove HTML tags
hts = HTMLTagStripper()
hts.feed(html)
# get the data and cast it as string
data = str(hts.get_data())
# print it out!
print(data)
|
<commit_before><commit_msg>Add HTML tag stripper/remover (also remove <script> and <style> contents)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib2
from HTMLParser import HTMLParser
# HTML Tag Stripper class
class HTMLTagStripper(HTMLParser):
def __init__(self):
self.reset()
self.data = []
self.isScriptTag = False
self.isStyleTag = False
def handle_starttag(self, tag, attr):
# <script> tag flag
if tag == 'script':
self.isScriptTag = True
elif tag == 'style':
self.isStyleTag = True
def handle_endtag(self, tag):
# </script> (end) tag flag
if tag == 'script':
self.isScriptTag = False
elif tag == 'style':
self.isStyleTag = False
def handle_data(self, data):
# as long as it's not a <script> and <style> tag, store the data value
if not self.isScriptTag and not self.isStyleTag:
# but wait there's more! ...as long as it's not a line feed or tab
if data and data != "\n" and data != "\t":
# store/append the data and while we're at it, strip whitespaces, tags, line feed, and carriage return
self.data.append(data.strip(' \t\n\r'))
def get_data(self):
# filter out null/None nodes
resp = filter(None, self.data)
return ' '.join(resp)
# open the url/page
response = urllib2.urlopen("https://www.python.org/")
html = response.read()
# remove HTML tags
hts = HTMLTagStripper()
hts.feed(html)
# get the data and cast it as string
data = str(hts.get_data())
# print it out!
print(data)
|
|
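The stripper above is Python 2 code: urllib2, the top-level HTMLParser module, and a list-returning filter. A hedged Python 3 port of the same idea follows; note that in Python 3 the parser's __init__ must run (reset() alone is not enough) and the HTTP bytes need decoding before feed():

# Sketch: Python 3 port of the tag stripper.
from html.parser import HTMLParser
from urllib.request import urlopen

class HTMLTagStripper(HTMLParser):
    def __init__(self):
        super().__init__()
        self.chunks = []
        self._skip = 0                   # inside <script>/<style> when > 0

    def handle_starttag(self, tag, attrs):
        if tag in ("script", "style"):
            self._skip += 1

    def handle_endtag(self, tag):
        if tag in ("script", "style") and self._skip:
            self._skip -= 1

    def handle_data(self, data):
        if not self._skip:
            text = data.strip()
            if text:
                self.chunks.append(text)

    def get_text(self):
        return " ".join(self.chunks)

html = urlopen("https://www.python.org/").read().decode("utf-8", "replace")
stripper = HTMLTagStripper()
stripper.feed(html)
print(stripper.get_text())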
2f5c0a9ee004c4ba48724a987b9f8fa93fbbab2e
|
src/ecu.py
|
src/ecu.py
|
#!/usr/bin/env python3
# coding=utf-8
import serial
class EcuCollector():
SENTINEL = b'X'
RESPONSE_LENGTH = 91
def __init__(self, port, baudrate=9600):
self.port = port
self.serial_conn = serial.Serial(port, baudrate)
def get_data(self):
"""
Reads data from the ECU and returns it in a dictionary.
"""
self.serial_conn.write(self.SENTINEL)
data = self.serial_conn.read(self.RESPONSE_LENGTH)
results = {
'engine_speed': __get_vehicle_speed(data),
'tachometer': __get_engine_speed(data),
'temp': __get_engine_temp(data)
}
return results
def __get_engine_speed(data):
""" Returns the engine speed in RPM """
return data[4] * 50 + data[5]
def __get_engine_temp(data):
""" Returns the engine temperature in degrees Farenheit """
temp_celsius = data[7]
return (temp_celsius * 1.8) + 32
def __get_vehicle_speed(data):
""" Returns the vehicle speed in MPH (as determined by the ECU) """
return data[27]
|
Add collection class for stealing data from the ECU
|
Add collection class for stealing data from the ECU
|
Python
|
epl-1.0
|
MSOE-Supermileage/datacollector,MSOE-Supermileage/datacollector,MSOE-Supermileage/datacollector
|
Add collection class for stealing data from the ECU
|
#!/usr/bin/env python3
# coding=utf-8
import serial
class EcuCollector():
SENTINEL = b'X'
RESPONSE_LENGTH = 91
def __init__(self, port, baudrate=9600):
self.port = port
self.serial_conn = serial.Serial(port, baudrate)
def get_data(self):
"""
Reads data from the ECU and returns it in a dictionary.
"""
self.serial_conn.write(self.SENTINEL)
data = self.serial_conn.read(self.RESPONSE_LENGTH)
results = {
'engine_speed': __get_vehicle_speed(data),
'tachometer': __get_engine_speed(data),
'temp': __get_engine_temp(data)
}
return results
def __get_engine_speed(data):
""" Returns the engine speed in RPM """
return data[4] * 50 + data[5]
def __get_engine_temp(data):
""" Returns the engine temperature in degrees Farenheit """
temp_celsius = data[7]
return (temp_celsius * 1.8) + 32
def __get_vehicle_speed(data):
""" Returns the vehicle speed in MPH (as determined by the ECU) """
return data[27]
|
<commit_before><commit_msg>Add collection class for stealing data from the ECU<commit_after>
|
#!/usr/bin/env python3
# coding=utf-8
import serial
class EcuCollector():
SENTINEL = b'X'
RESPONSE_LENGTH = 91
def __init__(self, port, baudrate=9600):
self.port = port
self.serial_conn = serial.Serial(port, baudrate)
def get_data(self):
"""
Reads data from the ECU and returns it in a dictionary.
"""
self.serial_conn.write(self.SENTINEL)
data = self.serial_conn.read(self.RESPONSE_LENGTH)
results = {
'engine_speed': __get_vehicle_speed(data),
'tachometer': __get_engine_speed(data),
'temp': __get_engine_temp(data)
}
return results
def __get_engine_speed(data):
""" Returns the engine speed in RPM """
return data[4] * 50 + data[5]
def __get_engine_temp(data):
""" Returns the engine temperature in degrees Farenheit """
temp_celsius = data[7]
return (temp_celsius * 1.8) + 32
def __get_vehicle_speed(data):
""" Returns the vehicle speed in MPH (as determined by the ECU) """
return data[27]
|
Add collection class for stealing data from the ECU#!/usr/bin/env python3
# coding=utf-8
import serial
class EcuCollector():
SENTINEL = b'X'
RESPONSE_LENGTH = 91
def __init__(self, port, baudrate=9600):
self.port = port
self.serial_conn = serial.Serial(port, baudrate)
def get_data(self):
"""
Reads data from the ECU and returns it in a dictionary.
"""
self.serial_conn.write(self.SENTINEL)
data = self.serial_conn.read(self.RESPONSE_LENGTH)
results = {
'engine_speed': __get_vehicle_speed(data),
'tachometer': __get_engine_speed(data),
'temp': __get_engine_temp(data)
}
return results
def __get_engine_speed(data):
""" Returns the engine speed in RPM """
return data[4] * 50 + data[5]
def __get_engine_temp(data):
""" Returns the engine temperature in degrees Farenheit """
temp_celsius = data[7]
return (temp_celsius * 1.8) + 32
def __get_vehicle_speed(data):
""" Returns the vehicle speed in MPH (as determined by the ECU) """
return data[27]
|
<commit_before><commit_msg>Add collection class for stealing data from the ECU<commit_after>#!/usr/bin/env python3
# coding=utf-8
import serial
class EcuCollector():
SENTINEL = b'X'
RESPONSE_LENGTH = 91
def __init__(self, port, baudrate=9600):
self.port = port
self.serial_conn = serial.Serial(port, baudrate)
def get_data(self):
"""
Reads data from the ECU and returns it in a dictionary.
"""
self.serial_conn.write(self.SENTINEL)
data = self.serial_conn.read(self.RESPONSE_LENGTH)
results = {
'engine_speed': __get_vehicle_speed(data),
'tachometer': __get_engine_speed(data),
'temp': __get_engine_temp(data)
}
return results
def __get_engine_speed(data):
""" Returns the engine speed in RPM """
return data[4] * 50 + data[5]
def __get_engine_temp(data):
""" Returns the engine temperature in degrees Farenheit """
temp_celsius = data[7]
return (temp_celsius * 1.8) + 32
def __get_vehicle_speed(data):
""" Returns the vehicle speed in MPH (as determined by the ECU) """
return data[27]
|
|
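One pitfall in the collector above: __get_engine_speed and friends are referenced inside the class body, where double leading underscores are name-mangled to _EcuCollector__get_engine_speed, while the functions themselves live at module level, so get_data() would raise NameError at runtime. Below is a sketch of the decoding logic with single-underscore helpers; the byte offsets are copied from the original and assumed correct for that ECU, and the original's 'engine_speed' key (which actually carries vehicle speed) is renamed for clarity:

# Sketch: same frame decoding without the name-mangling trap.
def _engine_speed_rpm(data: bytes) -> int:
    return data[4] * 50 + data[5]

def _engine_temp_f(data: bytes) -> float:
    return data[7] * 1.8 + 32            # byte 7 is degrees Celsius

def _vehicle_speed_mph(data: bytes) -> int:
    return data[27]

def decode_frame(data: bytes) -> dict:
    return {
        "vehicle_speed": _vehicle_speed_mph(data),
        "tachometer": _engine_speed_rpm(data),
        "temp": _engine_temp_f(data),
    }

assert decode_frame(bytes(91))["temp"] == 32.0   # all-zero frame sanity check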
31822d7cdc77647bf06d2b3d32c6e72c80f55815
|
opal/management/commands/create_singletons.py
|
opal/management/commands/create_singletons.py
|
"""
Create singletons that may have been dropped
"""
import collections
import json
from optparse import make_option
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from opal.models import Patient, Episode, PatientSubrecord, EpisodeSubrecord
class Command(BaseCommand):
def handle(self, *args, **options):
print "Creating Singletons"
for patient in Patient.objects.all():
print 'Examining', patient
for subclass in PatientSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(patient=patient).count() == 0:
print 'Creating', subclass
subclass.objects.create(patient=patient)
for episode in Episode.objects.all():
print 'Examining', episode
for subclass in EpisodeSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(episode=episode).count() == 0:
print 'Creating', subclass
subclass.objects.create(episode=episode)
return
|
Add a management command to create trailing singletons.
|
Add a management command to create trailing singletons.
|
Python
|
agpl-3.0
|
khchine5/opal,khchine5/opal,khchine5/opal
|
Add a management command to create trailing singletons.
|
"""
Create singletons that may have been dropped
"""
import collections
import json
from optparse import make_option
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from opal.models import Patient, Episode, PatientSubrecord, EpisodeSubrecord
class Command(BaseCommand):
def handle(self, *args, **options):
print "Creating Singletons"
for patient in Patient.objects.all():
print 'Examining', patient
for subclass in PatientSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(patient=patient).count() == 0:
print 'Creating', subclass
subclass.objects.create(patient=patient)
for episode in Episode.objects.all():
print 'Examining', episode
for subclass in EpisodeSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(episode=episode).count() == 0:
print 'Creating', subclass
subclass.objects.create(episode=episode)
return
|
<commit_before><commit_msg>Add a management command to create trailing singletons.<commit_after>
|
"""
Create singletons that may have been dropped
"""
import collections
import json
from optparse import make_option
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from opal.models import Patient, Episode, PatientSubrecord, EpisodeSubrecord
class Command(BaseCommand):
def handle(self, *args, **options):
print "Creating Singletons"
for patient in Patient.objects.all():
print 'Examining', patient
for subclass in PatientSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(patient=patient).count() == 0:
print 'Creating', subclass
subclass.objects.create(patient=patient)
for episode in Episode.objects.all():
print 'Examining', episode
for subclass in EpisodeSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(episode=episode).count() == 0:
print 'Creating', subclass
subclass.objects.create(episode=episode)
return
|
Add a management command to create trailing singletons."""
Create singletons that may have been dropped
"""
import collections
import json
from optparse import make_option
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from opal.models import Patient, Episode, PatientSubrecord, EpisodeSubrecord
class Command(BaseCommand):
def handle(self, *args, **options):
print "Creating Singletons"
for patient in Patient.objects.all():
print 'Examining', patient
for subclass in PatientSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(patient=patient).count() == 0:
print 'Creating', subclass
subclass.objects.create(patient=patient)
for episode in Episode.objects.all():
print 'Examining', episode
for subclass in EpisodeSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(episode=episode).count() == 0:
print 'Creating', subclass
subclass.objects.create(episode=episode)
return
|
<commit_before><commit_msg>Add a management command to create trailing singletons.<commit_after>"""
Create singletons that may have been dropped
"""
import collections
import json
from optparse import make_option
from django.contrib.contenttypes.models import ContentType
from django.core.management.base import BaseCommand
from opal.models import Patient, Episode, PatientSubrecord, EpisodeSubrecord
class Command(BaseCommand):
def handle(self, *args, **options):
print "Creating Singletons"
for patient in Patient.objects.all():
print 'Examining', patient
for subclass in PatientSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(patient=patient).count() == 0:
print 'Creating', subclass
subclass.objects.create(patient=patient)
for episode in Episode.objects.all():
print 'Examining', episode
for subclass in EpisodeSubrecord.__subclasses__():
if subclass._is_singleton:
if subclass.objects.filter(episode=episode).count() == 0:
print 'Creating', subclass
subclass.objects.create(episode=episode)
return
|
|
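The handle() above runs a count() query and then a create() for each missing singleton; Django's get_or_create does the same in one call and copes better with concurrent runs. A hedged sketch of an equivalent command body (model names match the record above, but this is not the actual opal code):

# Sketch: singleton backfill via get_or_create.
from django.core.management.base import BaseCommand

from opal.models import Episode, EpisodeSubrecord, Patient, PatientSubrecord

class Command(BaseCommand):
    help = "Create singleton subrecords that may have been dropped"

    def handle(self, *args, **options):
        for patient in Patient.objects.all():
            for subclass in PatientSubrecord.__subclasses__():
                if subclass._is_singleton:
                    subclass.objects.get_or_create(patient=patient)
        for episode in Episode.objects.all():
            for subclass in EpisodeSubrecord.__subclasses__():
                if subclass._is_singleton:
                    subclass.objects.get_or_create(episode=episode)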
d6d4601a5a1238cf7d2f0a6bdf782a77318d28a3
|
tests_django/integration_tests/test_output_adapter_integration.py
|
tests_django/integration_tests/test_output_adapter_integration.py
|
from django.test import TestCase
from chatterbot.ext.django_chatterbot.models import Statement
class OutputIntegrationTestCase(TestCase):
"""
Tests to make sure that output adapters
function correctly when using Django.
"""
def test_output_format_adapter(self):
from chatterbot.output import OutputFormatAdapter
adapter = OutputFormatAdapter()
statement = Statement(text='_')
result = adapter.process_response(statement)
self.assertEqual(result.text, '_')
|
Test that the output format adapter works with Django
|
Test that the output format adapter works with Django
|
Python
|
bsd-3-clause
|
vkosuri/ChatterBot,Reinaesaya/OUIRL-ChatBot,maclogan/VirtualPenPal,gunthercox/ChatterBot,davizucon/ChatterBot,Gustavo6046/ChatterBot,Reinaesaya/OUIRL-ChatBot
|
Test that the output format adapter works with Django
|
from django.test import TestCase
from chatterbot.ext.django_chatterbot.models import Statement
class OutputIntegrationTestCase(TestCase):
"""
Tests to make sure that output adapters
function correctly when using Django.
"""
def test_output_format_adapter(self):
from chatterbot.output import OutputFormatAdapter
adapter = OutputFormatAdapter()
statement = Statement(text='_')
result = adapter.process_response(statement)
self.assertEqual(result.text, '_')
|
<commit_before><commit_msg>Test that the output format adapter works with Django<commit_after>
|
from django.test import TestCase
from chatterbot.ext.django_chatterbot.models import Statement
class OutputIntegrationTestCase(TestCase):
"""
Tests to make sure that output adapters
function correctly when using Django.
"""
def test_output_format_adapter(self):
from chatterbot.output import OutputFormatAdapter
adapter = OutputFormatAdapter()
statement = Statement(text='_')
result = adapter.process_response(statement)
self.assertEqual(result.text, '_')
|
Test that the output format adapter works with Djangofrom django.test import TestCase
from chatterbot.ext.django_chatterbot.models import Statement
class OutputIntegrationTestCase(TestCase):
"""
Tests to make sure that output adapters
function correctly when using Django.
"""
def test_output_format_adapter(self):
from chatterbot.output import OutputFormatAdapter
adapter = OutputFormatAdapter()
statement = Statement(text='_')
result = adapter.process_response(statement)
self.assertEqual(result.text, '_')
|
<commit_before><commit_msg>Test that the output format adapter works with Django<commit_after>from django.test import TestCase
from chatterbot.ext.django_chatterbot.models import Statement
class OutputIntegrationTestCase(TestCase):
"""
Tests to make sure that output adapters
function correctly when using Django.
"""
def test_output_format_adapter(self):
from chatterbot.output import OutputFormatAdapter
adapter = OutputFormatAdapter()
statement = Statement(text='_')
result = adapter.process_response(statement)
self.assertEqual(result.text, '_')
|
|
bb70a98437c87fa5b9677716acbcbd948d93f982
|
tests/syft/grid/messages/setup_msg_test.py
|
tests/syft/grid/messages/setup_msg_test.py
|
# syft absolute
import syft as sy
from syft.core.io.address import Address
from syft.grid.messages.setup_messages import CreateInitialSetUpMessage
from syft.grid.messages.setup_messages import CreateInitialSetUpResponse
from syft.grid.messages.setup_messages import GetSetUpMessage
from syft.grid.messages.setup_messages import GetSetUpResponse
def test_create_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = CreateInitialSetUpMessage(
address=target,
        content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_create_initial_setup_response_serde() -> None:
target = Address(name="Alice")
request_content = {"msg": "Initial setup registered successfully!"}
msg = CreateInitialSetUpResponse(
address=target,
success=True,
        content=request_content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_get_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {}
msg = GetSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_delete_worker_response_serde() -> None:
target = Address(name="Alice")
content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = GetSetUpResponse(
success=True,
address=target,
content=content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
|
ADD PyGrid SetupService message tests
|
ADD PyGrid SetupService message tests
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
ADD PyGrid SetupService message tests
|
# syft absolute
import syft as sy
from syft.core.io.address import Address
from syft.grid.messages.setup_messages import CreateInitialSetUpMessage
from syft.grid.messages.setup_messages import CreateInitialSetUpResponse
from syft.grid.messages.setup_messages import GetSetUpMessage
from syft.grid.messages.setup_messages import GetSetUpResponse
def test_create_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = CreateInitialSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_create_initial_setup_response_serde() -> None:
target = Address(name="Alice")
request_content = {"msg": "Initial setup registered successfully!"}
msg = CreateInitialSetUpResponse(
address=target,
success=True,
content=request_content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_get_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {}
msg = GetSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_delete_worker_response_serde() -> None:
target = Address(name="Alice")
content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = GetSetUpResponse(
success=True,
address=target,
content=content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
|
<commit_before><commit_msg>ADD PyGrid SetupService message tests<commit_after>
|
# syft absolute
import syft as sy
from syft.core.io.address import Address
from syft.grid.messages.setup_messages import CreateInitialSetUpMessage
from syft.grid.messages.setup_messages import CreateInitialSetUpResponse
from syft.grid.messages.setup_messages import GetSetUpMessage
from syft.grid.messages.setup_messages import GetSetUpResponse
def test_create_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = CreateInitialSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_create_initial_setup_response_serde() -> None:
target = Address(name="Alice")
request_content = {"msg": "Initial setup registered successfully!"}
msg = CreateInitialSetUpResponse(
address=target,
success=True,
content=request_content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_get_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {}
msg = GetSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_delete_worker_response_serde() -> None:
target = Address(name="Alice")
content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = GetSetUpResponse(
success=True,
address=target,
content=content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
|
ADD PyGrid SetupService message tests# syft absolute
import syft as sy
from syft.core.io.address import Address
from syft.grid.messages.setup_messages import CreateInitialSetUpMessage
from syft.grid.messages.setup_messages import CreateInitialSetUpResponse
from syft.grid.messages.setup_messages import GetSetUpMessage
from syft.grid.messages.setup_messages import GetSetUpResponse
def test_create_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = CreateInitialSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_create_initial_setup_response_serde() -> None:
target = Address(name="Alice")
request_content = {"msg": "Initial setup registered successfully!"}
msg = CreateInitialSetUpResponse(
address=target,
success=True,
content=request_content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_get_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {}
msg = GetSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_delete_worker_response_serde() -> None:
target = Address(name="Alice")
content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = GetSetUpResponse(
success=True,
address=target,
content=content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
|
<commit_before><commit_msg>ADD PyGrid SetupService message tests<commit_after># syft absolute
import syft as sy
from syft.core.io.address import Address
from syft.grid.messages.setup_messages import CreateInitialSetUpMessage
from syft.grid.messages.setup_messages import CreateInitialSetUpResponse
from syft.grid.messages.setup_messages import GetSetUpMessage
from syft.grid.messages.setup_messages import GetSetUpResponse
def test_create_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = CreateInitialSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_create_initial_setup_response_serde() -> None:
target = Address(name="Alice")
request_content = {"msg": "Initial setup registered successfully!"}
msg = CreateInitialSetUpResponse(
address=target,
success=True,
content=request_content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_get_initial_setup_message_serde() -> None:
bob_vm = sy.VirtualMachine(name="Bob")
target = Address(name="Alice")
request_content = {}
msg = GetSetUpMessage(
address=target,
content=request_content,
reply_to=bob_vm.address,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
def test_delete_worker_response_serde() -> None:
target = Address(name="Alice")
content = {
"settings": {
"cloud-admin-token" : "d84we35ad3a1d59a84sd9",
"cloud-credentials": "<cloud-credentials.pem>",
"infra": {
"autoscaling": True,
"triggers": {
"memory": "50",
"vCPU": "80"
}
},
}
}
msg = GetSetUpResponse(
success=True,
address=target,
content=content,
)
blob = msg.serialize()
msg2 = sy.deserialize(blob=blob)
assert msg.id == msg2.id
assert msg.address == target
assert msg.content == msg2.content
assert msg == msg2
|
|
039afd96fd66844e3d0ac031458c976d74aca325
|
infra/bots/recipe_modules/flavor/__init__.py
|
infra/bots/recipe_modules/flavor/__init__.py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'depot_tools/bot_update',
'depot_tools/cipd',
'depot_tools/gclient',
'depot_tools/git',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
|
Remove unnecessary depot_tools dependency in flavor module
|
[recipes] Remove unnecessary depot_tools dependency in flavor module
Change-Id: Ic1f3896a450bd81bb8c4859d3998c9873af821f6
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/263016
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Eric Boren <0e499112533c8544f0505ea0d08394fb5ad7d8fa@google.com>
|
Python
|
bsd-3-clause
|
HalCanary/skia-hc,google/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,google/skia,google/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,google/skia,HalCanary/skia-hc,google/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,google/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,google/skia,google/skia,HalCanary/skia-hc,google/skia,HalCanary/skia-hc,aosp-mirror/platform_external_skia,google/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'depot_tools/bot_update',
'depot_tools/cipd',
'depot_tools/gclient',
'depot_tools/git',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
[recipes] Remove unnecessary depot_tools dependency in flavor module
Change-Id: Ic1f3896a450bd81bb8c4859d3998c9873af821f6
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/263016
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Eric Boren <0e499112533c8544f0505ea0d08394fb5ad7d8fa@google.com>
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
|
<commit_before># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'depot_tools/bot_update',
'depot_tools/cipd',
'depot_tools/gclient',
'depot_tools/git',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
<commit_msg>[recipes] Remove unnecessary depot_tools dependency in flavor module
Change-Id: Ic1f3896a450bd81bb8c4859d3998c9873af821f6
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/263016
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Eric Boren <0e499112533c8544f0505ea0d08394fb5ad7d8fa@google.com><commit_after>
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'depot_tools/bot_update',
'depot_tools/cipd',
'depot_tools/gclient',
'depot_tools/git',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
[recipes] Remove unnecessary depot_tools dependency in flavor module
Change-Id: Ic1f3896a450bd81bb8c4859d3998c9873af821f6
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/263016
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Eric Boren <0e499112533c8544f0505ea0d08394fb5ad7d8fa@google.com># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
|
<commit_before># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'depot_tools/bot_update',
'depot_tools/cipd',
'depot_tools/gclient',
'depot_tools/git',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
<commit_msg>[recipes] Remove unnecessary depot_tools dependency in flavor module
Change-Id: Ic1f3896a450bd81bb8c4859d3998c9873af821f6
Reviewed-on: https://skia-review.googlesource.com/c/skia/+/263016
Reviewed-by: Ravi Mistry <9fa2e7438b8cb730f96b74865492597170561628@google.com>
Commit-Queue: Eric Boren <0e499112533c8544f0505ea0d08394fb5ad7d8fa@google.com><commit_after># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
DEPS = [
'builder_name_schema',
'docker',
'env',
'infra',
'recipe_engine/context',
'recipe_engine/file',
'recipe_engine/json',
'recipe_engine/path',
'recipe_engine/platform',
'recipe_engine/python',
'recipe_engine/raw_io',
'recipe_engine/step',
'run',
'vars',
]
|
8aa36c52a2a2e3e24b5d36251416ecd4a9aba0f1
|
utils/logstats.py
|
utils/logstats.py
|
import re
import sys
from collections import Counter
def oneof(strs):
return "(?:%s)" % "|".join(strs)
BRIDGE_REGEX = r"# bridge out of Guard (0x[0-9a-fA-f]*) with \d* ops"
bridge_regex = re.compile(BRIDGE_REGEX)
def main(args):
for fname in sorted(args):
with open(fname, 'r') as infile:
data = infile.read()
bridge_ids = bridge_regex.findall(data)
all_ids = oneof(bridge_ids)
guard_regex = r"\+\d*: (guard_[^(]*)\(.*descr=<Guard%s>\).*" % all_ids
guard_regex = re.compile(guard_regex)
guards = guard_regex.findall(data)
counter = Counter(guards)
print(fname)
for k, v in sorted(counter.items(), key=lambda x: x[0]):
print("{:<20} {}".format(k + ":", v))
print("")
if __name__ == '__main__':
main(sys.argv[1:])
|
Add trace log stats extractor
|
Add trace log stats extractor
|
Python
|
mit
|
samth/pycket,magnusmorton/pycket,magnusmorton/pycket,magnusmorton/pycket,pycket/pycket,pycket/pycket,pycket/pycket,cderici/pycket,cderici/pycket,samth/pycket,cderici/pycket,samth/pycket
|
Add trace log stats extractor
|
import re
import sys
from collections import Counter
def oneof(strs):
return "(?:%s)" % "|".join(strs)
BRIDGE_REGEX = r"# bridge out of Guard (0x[0-9a-fA-f]*) with \d* ops"
bridge_regex = re.compile(BRIDGE_REGEX)
def main(args):
for fname in sorted(args):
with open(fname, 'r') as infile:
data = infile.read()
bridge_ids = bridge_regex.findall(data)
all_ids = oneof(bridge_ids)
guard_regex = r"\+\d*: (guard_[^(]*)\(.*descr=<Guard%s>\).*" % all_ids
guard_regex = re.compile(guard_regex)
guards = guard_regex.findall(data)
counter = Counter(guards)
print(fname)
for k, v in sorted(counter.items(), key=lambda x: x[0]):
print("{:<20} {}".format(k + ":", v))
print("")
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before><commit_msg>Add trace log stats extractor<commit_after>
|
import re
import sys
from collections import Counter
def oneof(strs):
return "(?:%s)" % "|".join(strs)
BRIDGE_REGEX = r"# bridge out of Guard (0x[0-9a-fA-f]*) with \d* ops"
bridge_regex = re.compile(BRIDGE_REGEX)
def main(args):
for fname in sorted(args):
with open(fname, 'r') as infile:
data = infile.read()
bridge_ids = bridge_regex.findall(data)
all_ids = oneof(bridge_ids)
guard_regex = r"\+\d*: (guard_[^(]*)\(.*descr=<Guard%s>\).*" % all_ids
guard_regex = re.compile(guard_regex)
guards = guard_regex.findall(data)
counter = Counter(guards)
print(fname)
for k, v in sorted(counter.items(), key=lambda x: x[0]):
print("{:<20} {}".format(k + ":", v))
print("")
if __name__ == '__main__':
main(sys.argv[1:])
|
Add trace log stats extractor
import re
import sys
from collections import Counter
def oneof(strs):
return "(?:%s)" % "|".join(strs)
BRIDGE_REGEX = r"# bridge out of Guard (0x[0-9a-fA-f]*) with \d* ops"
bridge_regex = re.compile(BRIDGE_REGEX)
def main(args):
for fname in sorted(args):
with open(fname, 'r') as infile:
data = infile.read()
bridge_ids = bridge_regex.findall(data)
all_ids = oneof(bridge_ids)
guard_regex = r"\+\d*: (guard_[^(]*)\(.*descr=<Guard%s>\).*" % all_ids
guard_regex = re.compile(guard_regex)
guards = guard_regex.findall(data)
counter = Counter(guards)
print(fname)
for k, v in sorted(counter.items(), key=lambda x: x[0]):
print("{:<20} {}".format(k + ":", v))
print("")
if __name__ == '__main__':
main(sys.argv[1:])
|
<commit_before><commit_msg>Add trace log stats extractor<commit_after>
import re
import sys
from collections import Counter
def oneof(strs):
return "(?:%s)" % "|".join(strs)
BRIDGE_REGEX = r"# bridge out of Guard (0x[0-9a-fA-f]*) with \d* ops"
bridge_regex = re.compile(BRIDGE_REGEX)
def main(args):
for fname in sorted(args):
with open(fname, 'r') as infile:
data = infile.read()
bridge_ids = bridge_regex.findall(data)
all_ids = oneof(bridge_ids)
guard_regex = r"\+\d*: (guard_[^(]*)\(.*descr=<Guard%s>\).*" % all_ids
guard_regex = re.compile(guard_regex)
guards = guard_regex.findall(data)
counter = Counter(guards)
print(fname)
for k, v in sorted(counter.items(), key=lambda x: x[0]):
print("{:<20} {}".format(k + ":", v))
print("")
if __name__ == '__main__':
main(sys.argv[1:])
|
|
7279b4f4a96472cd78bfb645b8c6d0aac4290fd7
|
Exscript/protocols/drivers/cienasaos.py
|
Exscript/protocols/drivers/cienasaos.py
|
"""
A driver for Ciena SAOS carrier ethernet devices
"""
import re
from Exscript.protocols.drivers.driver import Driver
_user_re = [re.compile(r'[^:]* login: ?$', re.I)]
_password_re = [re.compile(r'Password: ?$')]
_prompt_re = [re.compile(r'[\r\n][\-\w+\.:/]+[>#] ?$')]
_error_re = [re.compile(r'SHELL PARSER FAILURE'),
re.compile(r'invalid input', re.I),
re.compile(r'(?:incomplete|ambiguous) command', re.I),
re.compile(r'connection timed out', re.I),
re.compile(r'[^\r\n]+ not found', re.I)]
class CienaSAOSDriver(Driver):
def __init__(self):
Driver.__init__(self, 'cienasaos')
self.user_re = _user_re
self.password_re = _password_re
self.prompt_re = _prompt_re
self.error_re = _error_re
def check_head_for_os(self, string):
if 'SAOS is True Carrier Ethernet TM software' in string:
return 90
return 0
def init_terminal(self, conn):
conn.execute('system shell session set more off')
|
Add a driver for Ciena SAOS devices
|
Add a driver for Ciena SAOS devices
Ciena carrier ethernet devices - previous Worldwide Packets
|
Python
|
mit
|
knipknap/exscript,maximumG/exscript,knipknap/exscript,maximumG/exscript
|
Add a driver for Ciena SAOS devices
Ciena carrier ethernet devices - previously Worldwide Packets
|
"""
A driver for Ciena SAOS carrier ethernet devices
"""
import re
from Exscript.protocols.drivers.driver import Driver
_user_re = [re.compile(r'[^:]* login: ?$', re.I)]
_password_re = [re.compile(r'Password: ?$')]
_prompt_re = [re.compile(r'[\r\n][\-\w+\.:/]+[>#] ?$')]
_error_re = [re.compile(r'SHELL PARSER FAILURE'),
re.compile(r'invalid input', re.I),
re.compile(r'(?:incomplete|ambiguous) command', re.I),
re.compile(r'connection timed out', re.I),
re.compile(r'[^\r\n]+ not found', re.I)]
class CienaSAOSDriver(Driver):
def __init__(self):
Driver.__init__(self, 'cienasaos')
self.user_re = _user_re
self.password_re = _password_re
self.prompt_re = _prompt_re
self.error_re = _error_re
def check_head_for_os(self, string):
if 'SAOS is True Carrier Ethernet TM software' in string:
return 90
return 0
def init_terminal(self, conn):
conn.execute('system shell session set more off')
|
<commit_before><commit_msg>Add a driver for Ciena SAOS devices
Ciena carrier ethernet devices - previously Worldwide Packets<commit_after>
|
"""
A driver for Ciena SAOS carrier ethernet devices
"""
import re
from Exscript.protocols.drivers.driver import Driver
_user_re = [re.compile(r'[^:]* login: ?$', re.I)]
_password_re = [re.compile(r'Password: ?$')]
_prompt_re = [re.compile(r'[\r\n][\-\w+\.:/]+[>#] ?$')]
_error_re = [re.compile(r'SHELL PARSER FAILURE'),
re.compile(r'invalid input', re.I),
re.compile(r'(?:incomplete|ambiguous) command', re.I),
re.compile(r'connection timed out', re.I),
re.compile(r'[^\r\n]+ not found', re.I)]
class CienaSAOSDriver(Driver):
def __init__(self):
Driver.__init__(self, 'cienasaos')
self.user_re = _user_re
self.password_re = _password_re
self.prompt_re = _prompt_re
self.error_re = _error_re
def check_head_for_os(self, string):
if 'SAOS is True Carrier Ethernet TM software' in string:
return 90
return 0
def init_terminal(self, conn):
conn.execute('system shell session set more off')
|
Add a driver for Ciena SAOS devices
Ciena carrier ethernet devices - previously Worldwide Packets"""
A driver for Ciena SAOS carrier ethernet devices
"""
import re
from Exscript.protocols.drivers.driver import Driver
_user_re = [re.compile(r'[^:]* login: ?$', re.I)]
_password_re = [re.compile(r'Password: ?$')]
_prompt_re = [re.compile(r'[\r\n][\-\w+\.:/]+[>#] ?$')]
_error_re = [re.compile(r'SHELL PARSER FAILURE'),
re.compile(r'invalid input', re.I),
re.compile(r'(?:incomplete|ambiguous) command', re.I),
re.compile(r'connection timed out', re.I),
re.compile(r'[^\r\n]+ not found', re.I)]
class CienaSAOSDriver(Driver):
def __init__(self):
Driver.__init__(self, 'cienasaos')
self.user_re = _user_re
self.password_re = _password_re
self.prompt_re = _prompt_re
self.error_re = _error_re
def check_head_for_os(self, string):
if 'SAOS is True Carrier Ethernet TM software' in string:
return 90
return 0
def init_terminal(self, conn):
conn.execute('system shell session set more off')
|
<commit_before><commit_msg>Add a driver for Ciena SAOS devices
Ciena carrier ethernet devices - previously Worldwide Packets<commit_after>"""
A driver for Ciena SAOS carrier ethernet devices
"""
import re
from Exscript.protocols.drivers.driver import Driver
_user_re = [re.compile(r'[^:]* login: ?$', re.I)]
_password_re = [re.compile(r'Password: ?$')]
_prompt_re = [re.compile(r'[\r\n][\-\w+\.:/]+[>#] ?$')]
_error_re = [re.compile(r'SHELL PARSER FAILURE'),
re.compile(r'invalid input', re.I),
re.compile(r'(?:incomplete|ambiguous) command', re.I),
re.compile(r'connection timed out', re.I),
re.compile(r'[^\r\n]+ not found', re.I)]
class CienaSAOSDriver(Driver):
def __init__(self):
Driver.__init__(self, 'cienasaos')
self.user_re = _user_re
self.password_re = _password_re
self.prompt_re = _prompt_re
self.error_re = _error_re
def check_head_for_os(self, string):
if 'SAOS is True Carrier Ethernet TM software' in string:
return 90
return 0
def init_terminal(self, conn):
conn.execute('system shell session set more off')
|
|
60edb041e6096f37cc451acb77a44f421c37d910
|
tests/cupy_tests/core_tests/test_core.py
|
tests/cupy_tests/core_tests/test_core.py
|
import unittest
import cupy
from cupy.core import core
class TestGetSize(unittest.TestCase):
def test_none(self):
self.assertEqual(core.get_size(None), ())
def test_list(self):
self.assertEqual(core.get_size([1, 2]), (1, 2))
def test_tuple(self):
self.assertEqual(core.get_size((1, 2)), (1, 2))
def test_int(self):
self.assertEqual(core.get_size(1), (1,))
def test_invalid(self):
with self.assertRaises(ValueError):
core.get_size(1.0)
class TestInternalProd(unittest.TestCase):
def test_empty(self):
self.assertEqual(core.internal_prod([]), 1)
def test_one(self):
self.assertEqual(core.internal_prod([2]), 2)
def test_two(self):
self.assertEqual(core.internal_prod([2, 3]), 6)
class TestGetStridesForNocopyReshape(unittest.TestCase):
def test_different_size(self):
a = core.ndarray((2, 3))
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 5)),
[])
def test_one(self):
a = core.ndarray((1,), dtype=cupy.int32)
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 1, 1)),
[4, 4, 4])
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestGetContiguousStrides(unittest.TestCase):
def test_zero(self):
self.assertEqual(core._get_contiguous_strides((), 1), [])
def test_one(self):
self.assertEqual(core._get_contiguous_strides((1,), 2), [2])
def test_two(self):
self.assertEqual(core._get_contiguous_strides((1, 2), 3), [6, 3])
def test_three(self):
self.assertEqual(core._get_contiguous_strides((1, 2, 3), 4),
[24, 12, 4])
class TestGetCContiguity(unittest.TestCase):
def test_zero_in_shape(self):
self.assertTrue(core._get_c_contiguity((1, 0, 1), (1, 1, 1), 3))
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestInferUnknownDimension(unittest.TestCase):
def test_known_all(self):
self.assertEqual(core._infer_unknown_dimension((1, 2, 3), 6),
[1, 2, 3])
def test_multiple_unknown(self):
with self.assertRaises(ValueError):
core._infer_unknown_dimension((-1, 1, -1), 10)
def test_infer(self):
self.assertEqual(core._infer_unknown_dimension((-1, 2, 3), 12),
[2, 2, 3])
|
Write test for core module of cupy
|
Write test for core module of cupy
|
Python
|
mit
|
delta2323/chainer,AlpacaDB/chainer,benob/chainer,okuta/chainer,niboshi/chainer,benob/chainer,muupan/chainer,hvy/chainer,t-abe/chainer,chainer/chainer,kiyukuta/chainer,cupy/cupy,muupan/chainer,AlpacaDB/chainer,ktnyt/chainer,anaruse/chainer,jnishi/chainer,ysekky/chainer,wkentaro/chainer,t-abe/chainer,chainer/chainer,jnishi/chainer,hvy/chainer,ktnyt/chainer,ronekko/chainer,keisuke-umezawa/chainer,sinhrks/chainer,truongdq/chainer,jnishi/chainer,tkerola/chainer,niboshi/chainer,kikusu/chainer,wkentaro/chainer,cupy/cupy,cemoody/chainer,keisuke-umezawa/chainer,hvy/chainer,pfnet/chainer,chainer/chainer,keisuke-umezawa/chainer,okuta/chainer,kikusu/chainer,niboshi/chainer,ktnyt/chainer,okuta/chainer,aonotas/chainer,keisuke-umezawa/chainer,okuta/chainer,ktnyt/chainer,kashif/chainer,sinhrks/chainer,jnishi/chainer,rezoo/chainer,chainer/chainer,cupy/cupy,truongdq/chainer,cupy/cupy,wkentaro/chainer,niboshi/chainer,wkentaro/chainer,hvy/chainer
|
Write test for core module of cupy
|
import unittest
import cupy
from cupy.core import core
class TestGetSize(unittest.TestCase):
def test_none(self):
self.assertEqual(core.get_size(None), ())
def test_list(self):
self.assertEqual(core.get_size([1, 2]), (1, 2))
def test_tuple(self):
self.assertEqual(core.get_size((1, 2)), (1, 2))
def test_int(self):
self.assertEqual(core.get_size(1), (1,))
def test_invalid(self):
with self.assertRaises(ValueError):
core.get_size(1.0)
class TestInternalProd(unittest.TestCase):
def test_empty(self):
self.assertEqual(core.internal_prod([]), 1)
def test_one(self):
self.assertEqual(core.internal_prod([2]), 2)
def test_two(self):
self.assertEqual(core.internal_prod([2, 3]), 6)
class TestGetStridesForNocopyReshape(unittest.TestCase):
def test_different_size(self):
a = core.ndarray((2, 3))
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 5)),
[])
def test_one(self):
a = core.ndarray((1,), dtype=cupy.int32)
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 1, 1)),
[4, 4, 4])
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestGetContiguousStrides(unittest.TestCase):
def test_zero(self):
self.assertEqual(core._get_contiguous_strides((), 1), [])
def test_one(self):
self.assertEqual(core._get_contiguous_strides((1,), 2), [2])
def test_two(self):
self.assertEqual(core._get_contiguous_strides((1, 2), 3), [6, 3])
def test_three(self):
self.assertEqual(core._get_contiguous_strides((1, 2, 3), 4),
[24, 12, 4])
class TestGetCContiguity(unittest.TestCase):
def test_zero_in_shape(self):
self.assertTrue(core._get_c_contiguity((1, 0, 1), (1, 1, 1), 3))
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestInferUnknownDimension(unittest.TestCase):
def test_known_all(self):
self.assertEqual(core._infer_unknown_dimension((1, 2, 3), 6),
[1, 2, 3])
def test_multiple_unknown(self):
with self.assertRaises(ValueError):
core._infer_unknown_dimension((-1, 1, -1), 10)
def test_infer(self):
self.assertEqual(core._infer_unknown_dimension((-1, 2, 3), 12),
[2, 2, 3])
|
<commit_before><commit_msg>Write test for core module of cupy<commit_after>
|
import unittest
import cupy
from cupy.core import core
class TestGetSize(unittest.TestCase):
def test_none(self):
self.assertEqual(core.get_size(None), ())
def test_list(self):
self.assertEqual(core.get_size([1, 2]), (1, 2))
def test_tuple(self):
self.assertEqual(core.get_size((1, 2)), (1, 2))
def test_int(self):
self.assertEqual(core.get_size(1), (1,))
def test_invalid(self):
with self.assertRaises(ValueError):
core.get_size(1.0)
class TestInternalProd(unittest.TestCase):
def test_empty(self):
self.assertEqual(core.internal_prod([]), 1)
def test_one(self):
self.assertEqual(core.internal_prod([2]), 2)
def test_two(self):
self.assertEqual(core.internal_prod([2, 3]), 6)
class TestGetStridesForNocopyReshape(unittest.TestCase):
def test_different_size(self):
a = core.ndarray((2, 3))
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 5)),
[])
def test_one(self):
a = core.ndarray((1,), dtype=cupy.int32)
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 1, 1)),
[4, 4, 4])
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestGetContiguousStrides(unittest.TestCase):
def test_zero(self):
self.assertEqual(core._get_contiguous_strides((), 1), [])
def test_one(self):
self.assertEqual(core._get_contiguous_strides((1,), 2), [2])
def test_two(self):
self.assertEqual(core._get_contiguous_strides((1, 2), 3), [6, 3])
def test_three(self):
self.assertEqual(core._get_contiguous_strides((1, 2, 3), 4),
[24, 12, 4])
class TestGetCContiguity(unittest.TestCase):
def test_zero_in_shape(self):
self.assertTrue(core._get_c_contiguity((1, 0, 1), (1, 1, 1), 3))
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestInferUnknownDimension(unittest.TestCase):
def test_known_all(self):
self.assertEqual(core._infer_unknown_dimension((1, 2, 3), 6),
[1, 2, 3])
def test_multiple_unknown(self):
with self.assertRaises(ValueError):
core._infer_unknown_dimension((-1, 1, -1), 10)
def test_infer(self):
self.assertEqual(core._infer_unknown_dimension((-1, 2, 3), 12),
[2, 2, 3])
|
Write test for core module of cupyimport unittest
import cupy
from cupy.core import core
class TestGetSize(unittest.TestCase):
def test_none(self):
self.assertEqual(core.get_size(None), ())
def test_list(self):
self.assertEqual(core.get_size([1, 2]), (1, 2))
def test_tuple(self):
self.assertEqual(core.get_size((1, 2)), (1, 2))
def test_int(self):
self.assertEqual(core.get_size(1), (1,))
def test_invalid(self):
with self.assertRaises(ValueError):
core.get_size(1.0)
class TestInternalProd(unittest.TestCase):
def test_empty(self):
self.assertEqual(core.internal_prod([]), 1)
def test_one(self):
self.assertEqual(core.internal_prod([2]), 2)
def test_two(self):
self.assertEqual(core.internal_prod([2, 3]), 6)
class TestGetStridesForNocopyReshape(unittest.TestCase):
def test_different_size(self):
a = core.ndarray((2, 3))
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 5)),
[])
def test_one(self):
a = core.ndarray((1,), dtype=cupy.int32)
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 1, 1)),
[4, 4, 4])
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestGetContiguousStrides(unittest.TestCase):
def test_zero(self):
self.assertEqual(core._get_contiguous_strides((), 1), [])
def test_one(self):
self.assertEqual(core._get_contiguous_strides((1,), 2), [2])
def test_two(self):
self.assertEqual(core._get_contiguous_strides((1, 2), 3), [6, 3])
def test_three(self):
self.assertEqual(core._get_contiguous_strides((1, 2, 3), 4),
[24, 12, 4])
class TestGetCContiguity(unittest.TestCase):
def test_zero_in_shape(self):
self.assertTrue(core._get_c_contiguity((1, 0, 1), (1, 1, 1), 3))
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestInferUnknownDimension(unittest.TestCase):
def test_known_all(self):
self.assertEqual(core._infer_unknown_dimension((1, 2, 3), 6),
[1, 2, 3])
def test_multiple_unknown(self):
with self.assertRaises(ValueError):
core._infer_unknown_dimension((-1, 1, -1), 10)
def test_infer(self):
self.assertEqual(core._infer_unknown_dimension((-1, 2, 3), 12),
[2, 2, 3])
|
<commit_before><commit_msg>Write test for core module of cupy<commit_after>import unittest
import cupy
from cupy.core import core
class TestGetSize(unittest.TestCase):
def test_none(self):
self.assertEqual(core.get_size(None), ())
def test_list(self):
self.assertEqual(core.get_size([1, 2]), (1, 2))
def test_tuple(self):
self.assertEqual(core.get_size((1, 2)), (1, 2))
def test_int(self):
self.assertEqual(core.get_size(1), (1,))
def test_invalid(self):
with self.assertRaises(ValueError):
core.get_size(1.0)
class TestInternalProd(unittest.TestCase):
def test_empty(self):
self.assertEqual(core.internal_prod([]), 1)
def test_one(self):
self.assertEqual(core.internal_prod([2]), 2)
def test_two(self):
self.assertEqual(core.internal_prod([2, 3]), 6)
class TestGetStridesForNocopyReshape(unittest.TestCase):
def test_different_size(self):
a = core.ndarray((2, 3))
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 5)),
[])
def test_one(self):
a = core.ndarray((1,), dtype=cupy.int32)
self.assertEqual(core._get_strides_for_nocopy_reshape(a, (1, 1, 1)),
[4, 4, 4])
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestGetContiguousStrides(unittest.TestCase):
def test_zero(self):
self.assertEqual(core._get_contiguous_strides((), 1), [])
def test_one(self):
self.assertEqual(core._get_contiguous_strides((1,), 2), [2])
def test_two(self):
self.assertEqual(core._get_contiguous_strides((1, 2), 3), [6, 3])
def test_three(self):
self.assertEqual(core._get_contiguous_strides((1, 2, 3), 4),
[24, 12, 4])
class TestGetCContiguity(unittest.TestCase):
def test_zero_in_shape(self):
self.assertTrue(core._get_c_contiguity((1, 0, 1), (1, 1, 1), 3))
def test_normal(self):
# TODO(unno): write test for normal case
pass
class TestInferUnknownDimension(unittest.TestCase):
def test_known_all(self):
self.assertEqual(core._infer_unknown_dimension((1, 2, 3), 6),
[1, 2, 3])
def test_multiple_unknown(self):
with self.assertRaises(ValueError):
core._infer_unknown_dimension((-1, 1, -1), 10)
def test_infer(self):
self.assertEqual(core._infer_unknown_dimension((-1, 2, 3), 12),
[2, 2, 3])
|
|
44023406197bd9271afd60405e323503ce6963a1
|
tests/test_nova_api_docs_tracker.py
|
tests/test_nova_api_docs_tracker.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import nova_api_docs_tracker
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import main
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
Fix stub unit test for main.py rename
|
Fix stub unit test for main.py rename
|
Python
|
apache-2.0
|
missaugustina/nova-api-docs-tracker,missaugustina/nova-api-docs-tracker,missaugustina/nova-api-docs-tracker,missaugustina/nova-api-docs-tracker
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import nova_api_docs_tracker
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
Fix stub unit test for main.py rename
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import main
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import nova_api_docs_tracker
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
<commit_msg>Fix stub unit test for main.py rename<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import main
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import nova_api_docs_tracker
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
Fix stub unit test for main.py rename#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import main
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import nova_api_docs_tracker
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
<commit_msg>Fix stub unit test for main.py rename<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_nova_api_docs_tracker
----------------------------------
Tests for `nova_api_docs_tracker` module.
"""
import unittest
from nova_api_docs_tracker import main
class TestNova_api_docs_tracker(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_000_something(self):
pass
if __name__ == '__main__':
import sys
sys.exit(unittest.main())
|
06804a64167ee09a7cf9c1681267223b2ce187e2
|
tests/test_core/test_wsservercomms.py
|
tests/test_core/test_wsservercomms.py
|
import unittest
from pkg_resources import require
from malcolm.core.wscomms.wsservercomms import WSServerComms
require("mock")
from mock import MagicMock
class TestWSServerComms(unittest.TestCase):
def test_send_to_client(self):
process = MagicMock()
ws = WSServerComms("Socket", process, object, None)
# ws.send_to_client("Test")
|
Remove context from Request to_dict, add Request from_dict
|
Remove context from Request to_dict, add Request from_dict
|
Python
|
apache-2.0
|
dls-controls/pymalcolm,dls-controls/pymalcolm,dls-controls/pymalcolm
|
Remove context from Request to_dict, add Request from_dict
|
import unittest
from pkg_resources import require
from malcolm.core.wscomms.wsservercomms import WSServerComms
require("mock")
from mock import MagicMock
class TestWSServerComms(unittest.TestCase):
def test_send_to_client(self):
process = MagicMock()
ws = WSServerComms("Socket", process, object, None)
# ws.send_to_client("Test")
|
<commit_before><commit_msg>Remove context from Request to_dict, add Request from_dict<commit_after>
|
import unittest
from pkg_resources import require
from malcolm.core.wscomms.wsservercomms import WSServerComms
require("mock")
from mock import MagicMock
class TestWSServerComms(unittest.TestCase):
def test_send_to_client(self):
process = MagicMock()
ws = WSServerComms("Socket", process, object, None)
# ws.send_to_client("Test")
|
Remove context from Request to_dict, add Request from_dictimport unittest
from pkg_resources import require
from malcolm.core.wscomms.wsservercomms import WSServerComms
require("mock")
from mock import MagicMock
class TestWSServerComms(unittest.TestCase):
def test_send_to_client(self):
process = MagicMock()
ws = WSServerComms("Socket", process, object, None)
# ws.send_to_client("Test")
|
<commit_before><commit_msg>Remove context from Request to_dict, add Request from_dict<commit_after>import unittest
from pkg_resources import require
from malcolm.core.wscomms.wsservercomms import WSServerComms
require("mock")
from mock import MagicMock
class TestWSServerComms(unittest.TestCase):
def test_send_to_client(self):
process = MagicMock()
ws = WSServerComms("Socket", process, object, None)
# ws.send_to_client("Test")
|
|
b612c3703a3b1581bfc7826f1e29a3b6053f0f4e
|
pal/services/joke_service.py
|
pal/services/joke_service.py
|
import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
yield line.strip().split(" :: ", 1)
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
prompt, response = map(str.strip, line.split("::", 1))
yield prompt, response.replace("\\n", "\n")
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
Make joke parsing more robust
|
Make joke parsing more robust
|
Python
|
bsd-3-clause
|
Machyne/pal,Machyne/pal,Machyne/pal,Machyne/pal
|
import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
yield line.strip().split(" :: ", 1)
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
Make joke parsing more robust
|
import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
prompt, response = map(str.strip, line.split("::", 1))
yield prompt, response.replace("\\n", "\n")
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
<commit_before>import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
yield line.strip().split(" :: ", 1)
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
<commit_msg>Make joke parsing more robust<commit_after>
|
import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
prompt, response = map(str.strip, line.split("::", 1))
yield prompt, response.replace("\\n", "\n")
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
yield line.strip().split(" :: ", 1)
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
Make joke parsing more robustimport re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
prompt, response = map(str.strip, line.split("::", 1))
yield prompt, response.replace("\\n", "\n")
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
<commit_before>import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
yield line.strip().split(" :: ", 1)
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
<commit_msg>Make joke parsing more robust<commit_after>import re
from os import path
from pal.services.service import Service
from pal.services.service import wrap_response
def get_jokes():
file_path = path.realpath(path.join(path.dirname(__file__),
"jokes.txt"))
with open(file_path, 'rb') as joke_file:
for line in joke_file.readlines():
if line.startswith("#"):
continue
prompt, response = map(str.strip, line.split("::", 1))
yield prompt, response.replace("\\n", "\n")
class JokeService(Service):
_JOKES = {prompt: response for prompt, response in get_jokes()}
def applies_to_me(self, client, feature_request_type):
return True
def get_confidence(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return 9001
return 0
@wrap_response
def go(self, params):
for joke in self._JOKES:
query = re.sub(r'[^a-z ]', '', params['query'].lower())
if joke in query:
return self._JOKES[joke]
return ('ERROR', 'Tom Hanks was in 1 movies.')
|
5b9d718cd02581659a8ab65c399ecf02884ae28c
|
scorecard/tests/indicators/test_income_adjustments.py
|
scorecard/tests/indicators/test_income_adjustments.py
|
from django.test import SimpleTestCase
from ...profile_data.indicators import (
IncomeAdjustments,
)
from collections import defaultdict
class MockAPIData:
references = defaultdict(lambda: "foobar")
def __init__(self, results, years):
self.results = results
self.years = years
class RevenueSourcesTests(SimpleTestCase):
maxDiff = None
def test_v1(self):
"""
- local and government are summed correctly
- total is calculated correctly
- percentages are calculated correctly
- latest audit year is used, other years ignored
"""
api_data = MockAPIData(
{
"revenue_budget_actual_v1": [
{
"item.code": "1300",
"amount.sum": 200,
"financial_year_end.year": 2050,
"amount_type.code": "ORGB",
},
{
"item.code": "1300",
"amount.sum": 210,
"financial_year_end.year": 2050,
"amount_type.code": "ADJB",
},
{
"item.code": "1300",
"amount.sum": 220,
"financial_year_end.year": 2050,
"amount_type.code": "AUDA",
},
],
"revenue_budget_actual_v2": [],
},
[2050, 2049, 2048, 2047]
)
expected = {
2050: [
{
"item": "Fines",
"amount": 10,
"comparison": "Original to adjusted budget",
"percent_changed": 5
},
{
"item": "Fines",
"amount": 20,
"comparison": "Original budget to audited outcome",
"percent_changed": 10
},
]
}
actual = IncomeAdjustments.get_muni_specifics(api_data)
self.assertEqual(expected, actual)
|
Add working test for minimal income adjustment functionality
|
Add working test for minimal income adjustment functionality
|
Python
|
mit
|
Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data,Code4SA/municipal-data
|
Add working test for minimal income adjustment functionality
|
from django.test import SimpleTestCase
from ...profile_data.indicators import (
IncomeAdjustments,
)
from collections import defaultdict
class MockAPIData:
references = defaultdict(lambda: "foobar")
def __init__(self, results, years):
self.results = results
self.years = years
class RevenueSourcesTests(SimpleTestCase):
maxDiff = None
def test_v1(self):
"""
- local and government are summed correctly
- total is calculated correctly
- percentages are calculated correctly
- latest audit year is used, other years ignored
"""
api_data = MockAPIData(
{
"revenue_budget_actual_v1": [
{
"item.code": "1300",
"amount.sum": 200,
"financial_year_end.year": 2050,
"amount_type.code": "ORGB",
},
{
"item.code": "1300",
"amount.sum": 210,
"financial_year_end.year": 2050,
"amount_type.code": "ADJB",
},
{
"item.code": "1300",
"amount.sum": 220,
"financial_year_end.year": 2050,
"amount_type.code": "AUDA",
},
],
"revenue_budget_actual_v2": [],
},
[2050, 2049, 2048, 2047]
)
expected = {
2050: [
{
"item": "Fines",
"amount": 10,
"comparison": "Original to adjusted budget",
"percent_changed": 5
},
{
"item": "Fines",
"amount": 20,
"comparison": "Original budget to audited outcome",
"percent_changed": 10
},
]
}
actual = IncomeAdjustments.get_muni_specifics(api_data)
self.assertEqual(expected, actual)
|
<commit_before><commit_msg>Add working test for minimal income adjustment functionality<commit_after>
|
from django.test import SimpleTestCase
from ...profile_data.indicators import (
IncomeAdjustments,
)
from collections import defaultdict
class MockAPIData:
references = defaultdict(lambda: "foobar")
def __init__(self, results, years):
self.results = results
self.years = years
class IncomeAdjustmentsTests(SimpleTestCase):
maxDiff = None
def test_v1(self):
"""
- original, adjusted and audited budget amounts are compared
- percentages are calculated correctly
- latest audit year is used, other years ignored
"""
api_data = MockAPIData(
{
"revenue_budget_actual_v1": [
{
"item.code": "1300",
"amount.sum": 200,
"financial_year_end.year": 2050,
"amount_type.code": "ORGB",
},
{
"item.code": "1300",
"amount.sum": 210,
"financial_year_end.year": 2050,
"amount_type.code": "ADJB",
},
{
"item.code": "1300",
"amount.sum": 220,
"financial_year_end.year": 2050,
"amount_type.code": "AUDA",
},
],
"revenue_budget_actual_v2": [],
},
[2050, 2049, 2048, 2047]
)
expected = {
2050: [
{
"item": "Fines",
"amount": 10,
"comparison": "Original to adjusted budget",
"percent_changed": 5
},
{
"item": "Fines",
"amount": 20,
"comparison": "Original budget to audited outcome",
"percent_changed": 10
},
]
}
actual = IncomeAdjustments.get_muni_specifics(api_data)
self.assertEqual(expected, actual)
|
Add working test for minimal income adjustment functionalityfrom django.test import SimpleTestCase
from ...profile_data.indicators import (
IncomeAdjustments,
)
from collections import defaultdict
class MockAPIData:
references = defaultdict(lambda: "foobar")
def __init__(self, results, years):
self.results = results
self.years = years
class IncomeAdjustmentsTests(SimpleTestCase):
maxDiff = None
def test_v1(self):
"""
- original, adjusted and audited budget amounts are compared
- percentages are calculated correctly
- latest audit year is used, other years ignored
"""
api_data = MockAPIData(
{
"revenue_budget_actual_v1": [
{
"item.code": "1300",
"amount.sum": 200,
"financial_year_end.year": 2050,
"amount_type.code": "ORGB",
},
{
"item.code": "1300",
"amount.sum": 210,
"financial_year_end.year": 2050,
"amount_type.code": "ADJB",
},
{
"item.code": "1300",
"amount.sum": 220,
"financial_year_end.year": 2050,
"amount_type.code": "AUDA",
},
],
"revenue_budget_actual_v2": [],
},
[2050, 2049, 2048, 2047]
)
expected = {
2050: [
{
"item": "Fines",
"amount": 10,
"comparison": "Original to adjusted budget",
"percent_changed": 5
},
{
"item": "Fines",
"amount": 20,
"comparison": "Original budget to audited outcome",
"percent_changed": 10
},
]
}
actual = IncomeAdjustments.get_muni_specifics(api_data)
self.assertEqual(expected, actual)
|
<commit_before><commit_msg>Add working test for minimal income adjustment functionality<commit_after>from django.test import SimpleTestCase
from ...profile_data.indicators import (
IncomeAdjustments,
)
from collections import defaultdict
class MockAPIData:
references = defaultdict(lambda: "foobar")
def __init__(self, results, years):
self.results = results
self.years = years
class IncomeAdjustmentsTests(SimpleTestCase):
maxDiff = None
def test_v1(self):
"""
- original, adjusted and audited budget amounts are compared
- percentages are calculated correctly
- latest audit year is used, other years ignored
"""
api_data = MockAPIData(
{
"revenue_budget_actual_v1": [
{
"item.code": "1300",
"amount.sum": 200,
"financial_year_end.year": 2050,
"amount_type.code": "ORGB",
},
{
"item.code": "1300",
"amount.sum": 210,
"financial_year_end.year": 2050,
"amount_type.code": "ADJB",
},
{
"item.code": "1300",
"amount.sum": 220,
"financial_year_end.year": 2050,
"amount_type.code": "AUDA",
},
],
"revenue_budget_actual_v2": [],
},
[2050, 2049, 2048, 2047]
)
expected = {
2050: [
{
"item": "Fines",
"amount": 10,
"comparison": "Original to adjusted budget",
"percent_changed": 5
},
{
"item": "Fines",
"amount": 20,
"comparison": "Original budget to audited outcome",
"percent_changed": 10
},
]
}
actual = IncomeAdjustments.get_muni_specifics(api_data)
self.assertEqual(expected, actual)
|
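For reference, the expected values in the fixture above follow from the three amount types recorded for item 1300 in 2050; percent_changed appears to be measured against the original budget (the IncomeAdjustments implementation itself is not shown here, so the formula is inferred from the test data):
orgb, adjb, auda = 200, 210, 220
# Original to adjusted budget: amount delta and percent change vs. ORGB
print(adjb - orgb, 100 * (adjb - orgb) / orgb)  # 10 5.0
# Original budget to audited outcome
print(auda - orgb, 100 * (auda - orgb) / orgb)  # 20 10.0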
|
f65eb2c5e3b07775285c231e5c211ef6739ac51a
|
nose2/tests/unit/test_options.py
|
nose2/tests/unit/test_options.py
|
from nose2 import options
from nose2.compat import unittest
class TestMultipassOptionParser(unittest.TestCase):
def setUp(self):
self.p = options.MultipassOptionParser()
def test_parser_leaves_unhandled_arguments(self):
args, argv = self.p.parse_args(['-x', 'foo', 'bar', '--this=that'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
def test_parser_can_extract_test_names(self):
self.p.add_argument('tests', nargs='*', default=[])
args, argv = self.p.parse_args(['-x', 'foo', 'bar'])
print(args, argv)
self.assertEqual(argv, ['-x'])
self.assertEqual(args.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(argv2, ['-x', '--this=that'])
self.assertEqual(args2.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_options_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('-x')
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(args2.x, 'foo')
self.assertEqual(argv2, ['--this=that'])
self.assertEqual(args2.tests, ['bar'])
|
Add unit tests for options module
|
Add unit tests for options module
|
Python
|
bsd-2-clause
|
little-dude/nose2,leth/nose2,ezigman/nose2,ptthiem/nose2,little-dude/nose2,leth/nose2,ojengwa/nose2,ojengwa/nose2,ptthiem/nose2,ezigman/nose2
|
Add unit tests for options module
|
from nose2 import options
from nose2.compat import unittest
class TestMultipassOptionParser(unittest.TestCase):
def setUp(self):
self.p = options.MultipassOptionParser()
def test_parser_leaves_unhandled_arguments(self):
args, argv = self.p.parse_args(['-x', 'foo', 'bar', '--this=that'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
def test_parser_can_extract_test_names(self):
self.p.add_argument('tests', nargs='*', default=[])
args, argv = self.p.parse_args(['-x', 'foo', 'bar'])
print(args, argv)
self.assertEqual(argv, ['-x'])
self.assertEqual(args.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(argv2, ['-x', '--this=that'])
self.assertEqual(args2.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_options_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('-x')
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(args2.x, 'foo')
self.assertEqual(argv2, ['--this=that'])
self.assertEqual(args2.tests, ['bar'])
|
<commit_before><commit_msg>Add unit tests for options module<commit_after>
|
from nose2 import options
from nose2.compat import unittest
class TestMultipassOptionParser(unittest.TestCase):
def setUp(self):
self.p = options.MultipassOptionParser()
def test_parser_leaves_unhandled_arguments(self):
args, argv = self.p.parse_args(['-x', 'foo', 'bar', '--this=that'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
def test_parser_can_extract_test_names(self):
self.p.add_argument('tests', nargs='*', default=[])
args, argv = self.p.parse_args(['-x', 'foo', 'bar'])
print(args, argv)
self.assertEqual(argv, ['-x'])
self.assertEqual(args.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(argv2, ['-x', '--this=that'])
self.assertEqual(args2.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_options_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('-x')
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(args2.x, 'foo')
self.assertEqual(argv2, ['--this=that'])
self.assertEqual(args2.tests, ['bar'])
|
Add unit tests for options modulefrom nose2 import options
from nose2.compat import unittest
class TestMultipassOptionParser(unittest.TestCase):
def setUp(self):
self.p = options.MultipassOptionParser()
def test_parser_leaves_unhandled_arguments(self):
args, argv = self.p.parse_args(['-x', 'foo', 'bar', '--this=that'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
def test_parser_can_extract_test_names(self):
self.p.add_argument('tests', nargs='*', default=[])
args, argv = self.p.parse_args(['-x', 'foo', 'bar'])
print(args, argv)
self.assertEqual(argv, ['-x'])
self.assertEqual(args.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(argv2, ['-x', '--this=that'])
self.assertEqual(args2.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_options_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('-x')
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(args2.x, 'foo')
self.assertEqual(argv2, ['--this=that'])
self.assertEqual(args2.tests, ['bar'])
|
<commit_before><commit_msg>Add unit tests for options module<commit_after>from nose2 import options
from nose2.compat import unittest
class TestMultipassOptionParser(unittest.TestCase):
def setUp(self):
self.p = options.MultipassOptionParser()
def test_parser_leaves_unhandled_arguments(self):
args, argv = self.p.parse_args(['-x', 'foo', 'bar', '--this=that'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
def test_parser_can_extract_test_names(self):
self.p.add_argument('tests', nargs='*', default=[])
args, argv = self.p.parse_args(['-x', 'foo', 'bar'])
print(args, argv)
self.assertEqual(argv, ['-x'])
self.assertEqual(args.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(argv2, ['-x', '--this=that'])
self.assertEqual(args2.tests, ['foo', 'bar'])
def test_parser_can_extract_config_files_then_options_then_tests(self):
self.p.add_argument(
'--config', '-c', nargs='?', action='append', default=[])
args, argv = self.p.parse_args(['-x', 'foo', '-c', 'conf.cfg',
'bar', '--this=that'])
self.assertEqual(args.config, ['conf.cfg'])
self.assertEqual(argv, ['-x', 'foo', 'bar', '--this=that'])
self.p.add_argument('-x')
self.p.add_argument('tests', nargs='*', default=[])
args2, argv2 = self.p.parse_args(argv)
self.assertEqual(args2.x, 'foo')
self.assertEqual(argv2, ['--this=that'])
self.assertEqual(args2.tests, ['bar'])
|
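MultipassOptionParser itself is not part of this change; judging from the tests, parse_args returns a (namespace, leftover_argv) pair so that later passes can register more arguments and re-parse the leftovers. A rough sketch of that contract on top of argparse (hypothetical, not the real nose2 implementation):
import argparse

class MultipassParser(argparse.ArgumentParser):
    # Return (namespace, leftover_argv) instead of erroring on unknowns,
    # mirroring the contract exercised by the tests above.
    def parse_args(self, args=None):
        return self.parse_known_args(args)

p = MultipassParser()
p.add_argument('--config', '-c', nargs='?', action='append', default=[])
args, argv = p.parse_args(['-x', 'foo', '-c', 'conf.cfg', 'bar'])
print(args.config)  # ['conf.cfg']
print(argv)         # ['-x', 'foo', 'bar']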
|
b8598cf14e2c9d0be404082c0761b4abecf7f97d
|
bioagents/resources/make_trips_ontology.py
|
bioagents/resources/make_trips_ontology.py
|
import os
import sys
import xml.etree.ElementTree as ET
from rdflib import Graph, Namespace, Literal
trips_ns = Namespace('http://trips.ihmc.us/concepts/')
isa_rel = Namespace('http://trips.ihmc.us/relations/').term('isa')
def save_hierarchy(g, path):
with open(path, 'wb') as out_file:
g_bytes = g.serialize(format='nt')
# Replace extra new lines in string and get rid of empty line at end
g_bytes = g_bytes.replace(b'\n\n', b'\n').strip()
# Split into rows and sort
rows = g_bytes.split(b'\n')
rows.sort()
g_bytes = b'\n'.join(rows)
out_file.write(g_bytes)
def make_hierarchy(tree):
g = Graph()
concepts = tree.findall('concept')
for concept in concepts:
name = concept.attrib['name'].replace('ont::', '')
if name == 'root':
continue
term = trips_ns.term(name)
relations = concept.find("relation[@label='inherit']")
related_names = [rr.strip().replace('ont::', '') for rr
in relations.text.strip().split('\n')]
for related_name in related_names:
related_term = trips_ns.term(related_name)
g.add((term, isa_rel, related_term))
return g
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Usage: python make_trips_ontology.py /path/to/trips-ont-dsl.xml')
sys.exit()
fname = sys.argv[1]
tree = ET.parse(fname)
g = make_hierarchy(tree)
save_hierarchy(g, 'trips_ontology.rdf')
|
Implement script to make TRIPS ontology
|
Implement script to make TRIPS ontology
|
Python
|
bsd-2-clause
|
sorgerlab/bioagents,bgyori/bioagents
|
Implement script to make TRIPS ontology
|
import os
import sys
import xml.etree.ElementTree as ET
from rdflib import Graph, Namespace, Literal
trips_ns = Namespace('http://trips.ihmc.us/concepts/')
isa_rel = Namespace('http://trips.ihmc.us/relations/').term('isa')
def save_hierarchy(g, path):
with open(path, 'wb') as out_file:
g_bytes = g.serialize(format='nt')
# Replace extra new lines in string and get rid of empty line at end
g_bytes = g_bytes.replace(b'\n\n', b'\n').strip()
# Split into rows and sort
rows = g_bytes.split(b'\n')
rows.sort()
g_bytes = b'\n'.join(rows)
out_file.write(g_bytes)
def make_hierarchy(tree):
g = Graph()
concepts = tree.findall('concept')
for concept in concepts:
name = concept.attrib['name'].replace('ont::', '')
if name == 'root':
continue
term = trips_ns.term(name)
relations = concept.find("relation[@label='inherit']")
related_names = [rr.strip().replace('ont::', '') for rr
in relations.text.strip().split('\n')]
for related_name in related_names:
related_term = trips_ns.term(related_name)
g.add((term, isa_rel, related_term))
return g
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Usage: python make_trips_ontology.py /path/to/trips-ont-dsl.xml')
sys.exit()
fname = sys.argv[1]
tree = ET.parse(fname)
g = make_hierarchy(tree)
save_hierarchy(g, 'trips_ontology.rdf')
|
<commit_before><commit_msg>Implement script to make TRIPS ontology<commit_after>
|
import os
import sys
import xml.etree.ElementTree as ET
from rdflib import Graph, Namespace, Literal
trips_ns = Namespace('http://trips.ihmc.us/concepts/')
isa_rel = Namespace('http://trips.ihmc.us/relations/').term('isa')
def save_hierarchy(g, path):
with open(path, 'wb') as out_file:
g_bytes = g.serialize(format='nt')
# Replace extra new lines in string and get rid of empty line at end
g_bytes = g_bytes.replace(b'\n\n', b'\n').strip()
# Split into rows and sort
rows = g_bytes.split(b'\n')
rows.sort()
g_bytes = b'\n'.join(rows)
out_file.write(g_bytes)
def make_hierarchy(tree):
g = Graph()
concepts = tree.findall('concept')
for concept in concepts:
name = concept.attrib['name'].replace('ont::', '')
if name == 'root':
continue
term = trips_ns.term(name)
relations = concept.find("relation[@label='inherit']")
related_names = [rr.strip().replace('ont::', '') for rr
in relations.text.strip().split('\n')]
for related_name in related_names:
related_term = trips_ns.term(related_name)
g.add((term, isa_rel, related_term))
return g
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Usage: python make_trips_ontology.py /path/to/trips-ont-dsl.xml')
sys.exit()
fname = sys.argv[1]
tree = ET.parse(fname)
g = make_hierarchy(tree)
save_hierarchy(g, 'trips_ontology.rdf')
|
Implement script to make TRIPS ontologyimport os
import sys
import xml.etree.ElementTree as ET
from rdflib import Graph, Namespace, Literal
trips_ns = Namespace('http://trips.ihmc.us/concepts/')
isa_rel = Namespace('http://trips.ihmc.us/relations/').term('isa')
def save_hierarchy(g, path):
with open(path, 'wb') as out_file:
g_bytes = g.serialize(format='nt')
# Replace extra new lines in string and get rid of empty line at end
g_bytes = g_bytes.replace(b'\n\n', b'\n').strip()
# Split into rows and sort
rows = g_bytes.split(b'\n')
rows.sort()
g_bytes = b'\n'.join(rows)
out_file.write(g_bytes)
def make_hierarchy(tree):
g = Graph()
concepts = tree.findall('concept')
for concept in concepts:
name = concept.attrib['name'].replace('ont::', '')
if name == 'root':
continue
term = trips_ns.term(name)
relations = concept.find("relation[@label='inherit']")
related_names = [rr.strip().replace('ont::', '') for rr
in relations.text.strip().split('\n')]
for related_name in related_names:
related_term = trips_ns.term(related_name)
g.add((term, isa_rel, related_term))
return g
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Usage: python make_trips_ontology.py /path/to/trips-ont-dsl.xml')
sys.exit()
fname = sys.argv[1]
tree = ET.parse(fname)
g = make_hierarchy(tree)
save_hierarchy(g, 'trips_ontology.rdf')
|
<commit_before><commit_msg>Implement script to make TRIPS ontology<commit_after>import os
import sys
import xml.etree.ElementTree as ET
from rdflib import Graph, Namespace, Literal
trips_ns = Namespace('http://trips.ihmc.us/concepts/')
isa_rel = Namespace('http://trips.ihmc.us/relations/').term('isa')
def save_hierarchy(g, path):
with open(path, 'wb') as out_file:
g_bytes = g.serialize(format='nt')
# Replace extra new lines in string and get rid of empty line at end
g_bytes = g_bytes.replace(b'\n\n', b'\n').strip()
# Split into rows and sort
rows = g_bytes.split(b'\n')
rows.sort()
g_bytes = b'\n'.join(rows)
out_file.write(g_bytes)
def make_hierarchy(tree):
g = Graph()
concepts = tree.findall('concept')
for concept in concepts:
name = concept.attrib['name'].replace('ont::', '')
if name == 'root':
continue
term = trips_ns.term(name)
relations = concept.find("relation[@label='inherit']")
related_names = [rr.strip().replace('ont::', '') for rr
in relations.text.strip().split('\n')]
for related_name in related_names:
related_term = trips_ns.term(related_name)
g.add((term, isa_rel, related_term))
return g
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Usage: python make_trips_ontology.py /path/to/trips-ont-dsl.xml')
sys.exit()
fname = sys.argv[1]
tree = ET.parse(fname)
g = make_hierarchy(tree)
save_hierarchy(g, 'trips_ontology.rdf')
|
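The script assumes a TRIPS DSL layout in which each <concept> element carries an inherit relation whose text lists parent concepts one per line (inferred here from the parsing code; ont::cell and ont::body-part are made-up names):
import xml.etree.ElementTree as ET

sample = ('<ontology>'
          '<concept name="ont::cell">'
          '<relation label="inherit">ont::body-part</relation>'
          '</concept>'
          '</ontology>')
tree = ET.ElementTree(ET.fromstring(sample))
concept = tree.findall('concept')[0]
print(concept.attrib['name'])                           # ont::cell
print(concept.find("relation[@label='inherit']").text)  # ont::body-part
# make_hierarchy(tree) would emit the single triple:
# <http://trips.ihmc.us/concepts/cell> isa <http://trips.ihmc.us/concepts/body-part>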
|
b13c4be2c839618a91d199ad941411d70214e9bd
|
audio_pipeline/tb_ui/model/MoveFiles.py
|
audio_pipeline/tb_ui/model/MoveFiles.py
|
import os
import shutil
class MoveFiles:
def __init__(self, rule, copy):
"""
Move audiofiles to the appropriate destination directories,
as determined by the 'rule' function passed to rule
:param rule:
:return:
"""
self.rule = rule
self.copy = copy
def move_files(self, files):
"""
Iterate over the elements of a ProcessDirectory object, and move them to the correct directory,
using shutil
:param files:
:return:
"""
files.first()
if self.copy:
move = shutil.copy
else:
move = shutil.move
while files.has_next():
release = files.next()
# get path to has-mbid and no-mbid folders once per release
release_path = os.path.split(release[0].file_name)[0]
picard = release[0].picard
mb = release[0].mb
if not os.path.exists(picard):
os.mkdir(picard)
if not os.path.exists(mb):
os.mkdir(mb)
for track in release:
# move to correct folder
move(track.file_name, track.dest_dir)
print("moving " + ascii(track.file_name) + " to " + ascii(track.dest_dir))
try:
os.rmdir(picard)
except OSError as e:
pass
try:
os.rmdir(mb)
except OSError as e:
pass
try:
os.rmdir(release_path)
except OSError as e:
# release directory is not empty
continue
|
Add script to move files (move this out of MetaControl)
|
Add script to move files (move this out of MetaControl)
|
Python
|
mit
|
hidat/audio_pipeline
|
Add script to move files (move this out of MetaControl)
|
import os
import shutil
class MoveFiles:
def __init__(self, rule, copy):
"""
Move audiofiles to the appropriate destination directories,
as determined by the 'rule' function passed to rule
:param rule:
:return:
"""
self.rule = rule
self.copy = copy
def move_files(self, files):
"""
Iterate over the elements of a ProcessDirectory object, and move them to the correct directory,
using shutil
:param files:
:return:
"""
files.first()
if self.copy:
move = shutil.copy
else:
move = shutil.move
while files.has_next():
release = files.next()
# get path to has-mbid and no-mbid folders once per release
release_path = os.path.split(release[0].file_name)[0]
picard = release[0].picard
mb = release[0].mb
if not os.path.exists(picard):
os.mkdir(picard)
if not os.path.exists(mb):
os.mkdir(mb)
for track in release:
# move to correct folder
move(track.file_name, track.dest_dir)
print("moving " + ascii(track.file_name) + " to " + ascii(track.dest_dir))
try:
os.rmdir(picard)
except OSError as e:
pass
try:
os.rmdir(mb)
except OSError as e:
pass
try:
os.rmdir(release_path)
except OSError as e:
# release directory is not empty
continue
|
<commit_before><commit_msg>Add script to move files (move this out of MetaControl)<commit_after>
|
import os
import shutil
class MoveFiles:
def __init__(self, rule, copy):
"""
Move audiofiles to the appropriate destination directories,
as determined by the 'rule' function passed to rule
:param rule:
:return:
"""
self.rule = rule
self.copy = copy
def move_files(self, files):
"""
Iterate over the elements of a ProcessDirectory object, and move them to the correct directory,
using shutil
:param files:
:return:
"""
files.first()
if self.copy:
move = shutil.copy
else:
move = shutil.move
while files.has_next():
release = files.next()
# get path to has-mbid and no-mbid folders once per release
release_path = os.path.split(release[0].file_name)[0]
picard = release[0].picard
mb = release[0].mb
if not os.path.exists(picard):
os.mkdir(picard)
if not os.path.exists(mb):
os.mkdir(mb)
for track in release:
# move to correct folder
move(track.file_name, track.dest_dir)
print("moving " + ascii(track.file_name) + " to " + ascii(track.dest_dir))
try:
os.rmdir(picard)
except OSError as e:
pass
try:
os.rmdir(mb)
except OSError as e:
pass
try:
os.rmdir(release_path)
except OSError as e:
# release directory is not empty
continue
|
Add script to move files (move this out of MetaControl)import os
import shutil
class MoveFiles:
def __init__(self, rule, copy):
"""
Move audiofiles to the appropriate destination directories,
as determined by the 'rule' function passed to rule
:param rule:
:return:
"""
self.rule = rule
self.copy = copy
def move_files(self, files):
"""
Iterate over the elements of a ProcessDirectory object, and move them to the correct directory,
using shutil
:param files:
:return:
"""
files.first()
if self.copy:
move = shutil.copy
else:
move = shutil.move
while files.has_next():
release = files.next()
# get path to has-mbid and no-mbid folders once per release
release_path = os.path.split(release[0].file_name)[0]
picard = release[0].picard
mb = release[0].mb
if not os.path.exists(picard):
os.mkdir(picard)
if not os.path.exists(mb):
os.mkdir(mb)
for track in release:
# move to correct folder
move(track.file_name, track.dest_dir)
print("moving " + ascii(track.file_name) + " to " + ascii(track.dest_dir))
try:
os.rmdir(picard)
except OSError as e:
pass
try:
os.rmdir(mb)
except OSError as e:
pass
try:
os.rmdir(release_path)
except OSError as e:
# release directory is not empty
continue
|
<commit_before><commit_msg>Add script to move files (move this out of MetaControl)<commit_after>import os
import shutil
class MoveFiles:
def __init__(self, rule, copy):
"""
Move audiofiles to the appropriate destination directories,
as determined by the 'rule' function passed to rule
:param rule:
:return:
"""
self.rule = rule
self.copy = copy
def move_files(self, files):
"""
Iterate over the elements of a ProcessDirectory object, and move them to the correct directory,
using shutil
:param files:
:return:
"""
files.first()
if self.copy:
move = shutil.copy
else:
move = shutil.move
while files.has_next():
release = files.next()
# get path to has-mbid and no-mbid folders once per release
release_path = os.path.split(release[0].file_name)[0]
picard = release[0].picard
mb = release[0].mb
if not os.path.exists(picard):
os.mkdir(picard)
if not os.path.exists(mb):
os.mkdir(mb)
for track in release:
# move to correct folder
move(track.file_name, track.dest_dir)
print("moving " + ascii(track.file_name) + " to " + ascii(track.dest_dir))
try:
os.rmdir(picard)
except OSError as e:
pass
try:
os.rmdir(mb)
except OSError as e:
pass
try:
os.rmdir(release_path)
except OSError as e:
# release directory is not empty
continue
|
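The cleanup at the end of move_files relies on os.rmdir raising OSError when a directory is not empty; contextlib.suppress expresses the same intent more compactly (shown here as an alternative, not what the code above uses):
import contextlib
import os

with contextlib.suppress(OSError):
    os.rmdir('some/release/dir')  # removed only if it exists and is empty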
|
7a09d36448d646e29c8d0aeeb7c39df2d20885ab
|
test/unit/ggrc/models/test_states.py
|
test/unit/ggrc/models/test_states.py
|
"""Test Object State Module"""
import unittest
import ggrc.app # noqa pylint: disable=unused-import
from ggrc.models import all_models
class TestStates(unittest.TestCase):
"""Test Object State main Test Case class"""
def _assert_states(self, models, expected_states, default):
# pylint: disable=no-self-use
for model in all_models.all_models:
if model.__name__ not in models:
continue
assert hasattr(model, "valid_statuses"), \
"{} does not have valid_statuses".format(model.__name__)
assert set(model.valid_statuses()) == set(expected_states), \
"{} does not have expected states {}. Current states {}".format(
model.__name__, ', '.join(expected_states),
', '.join(model.valid_statuses()))
assert model.default_status() == default, \
"{} does not have expected default status {}, but {} instead".format(
model.__name__,
default,
model.default_status())
def test_basic_states(self):
"""Test basic object states"""
basic_states = ('Draft', 'Active', 'Deprecated')
basic_state_objects = (
'AccessGroup', 'Clause', 'Contract',
'Control', 'DataAsset', 'Directive', 'Facility', 'Issue', 'Market',
'Objective', 'OrgGroup', 'Policy', 'Process', 'Product', 'Program',
'Project', 'Regulation', 'Risk', 'Section', 'Standard', 'System',
'SystemOrProcess', 'Threat', 'Vendor')
self._assert_states(basic_state_objects, basic_states, 'Draft')
def test_audit_states(self):
"""Test states for Audit object"""
audit_states = ('Planned', 'In Progress', 'Manager Review',
'Ready for External Review', 'Completed')
self._assert_states(('Audit', ), audit_states, 'Planned')
def test_assignable_states(self):
"""Test states for Assignable objects (Assessment)"""
assignable_states = (
'In Progress', 'Completed', 'Not Started', 'Verified',
'Ready for Review')
self._assert_states(('Assessment', ), assignable_states, 'Not Started')
|
Add unit test for object state
|
Add unit test for object state
|
Python
|
apache-2.0
|
selahssea/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core
|
Add unit test for object state
|
"""Test Object State Module"""
import unittest
import ggrc.app # noqa pylint: disable=unused-import
from ggrc.models import all_models
class TestStates(unittest.TestCase):
"""Test Object State main Test Case class"""
def _assert_states(self, models, expected_states, default):
# pylint: disable=no-self-use
for model in all_models.all_models:
if model.__name__ not in models:
continue
assert hasattr(model, "valid_statuses"), \
"{} does not have valid_statuses".format(model.__name__)
assert set(model.valid_statuses()) == set(expected_states), \
"{} does not have expected states {}. Current states {}".format(
model.__name__, ', '.join(expected_states),
', '.join(model.valid_statuses()))
assert model.default_status() == default, \
"{} does not have expected default status {}, but {} instead".format(
model.__name__,
default,
model.default_status())
def test_basic_states(self):
"""Test basic object states"""
basic_states = ('Draft', 'Active', 'Deprecated')
basic_state_objects = (
'AccessGroup', 'Clause', 'Contract',
'Control', 'DataAsset', 'Directive', 'Facility', 'Issue', 'Market',
'Objective', 'OrgGroup', 'Policy', 'Process', 'Product', 'Program',
'Project', 'Regulation', 'Risk', 'Section', 'Standard', 'System',
'SystemOrProcess', 'Threat', 'Vendor')
self._assert_states(basic_state_objects, basic_states, 'Draft')
def test_audit_states(self):
"""Test states for Audit object"""
audit_states = ('Planned', 'In Progress', 'Manager Review',
'Ready for External Review', 'Completed')
self._assert_states(('Audit', ), audit_states, 'Planned')
def test_assignable_states(self):
"""Test states for Assignable objects (Assessment)"""
assignable_states = (
'In Progress', 'Completed', 'Not Started', 'Verified',
'Ready for Review')
self._assert_states(('Assessment', ), assignable_states, 'Not Started')
|
<commit_before><commit_msg>Add unit test for object state<commit_after>
|
"""Test Object State Module"""
import unittest
import ggrc.app # noqa pylint: disable=unused-import
from ggrc.models import all_models
class TestStates(unittest.TestCase):
"""Test Object State main Test Case class"""
def _assert_states(self, models, expected_states, default):
# pylint: disable=no-self-use
for model in all_models.all_models:
if model.__name__ not in models:
continue
assert hasattr(model, "valid_statuses"), \
"{} does not have valid_statuses".format(model.__name__)
assert set(model.valid_statuses()) == set(expected_states), \
"{} does not have expected states {}. Current states {}".format(
model.__name__, ', '.join(expected_states),
', '.join(model.valid_statuses()))
assert model.default_status() == default, \
"{} does not have expected default status {}, but {} instead".format(
model.__name__,
default,
model.default_status())
def test_basic_states(self):
"""Test basic object states"""
basic_states = ('Draft', 'Active', 'Deprecated')
basic_state_objects = (
'AccessGroup', 'Clause', 'Contract',
'Control', 'DataAsset', 'Directive', 'Facility', 'Issue', 'Market',
'Objective', 'OrgGroup', 'Policy', 'Process', 'Product', 'Program',
'Project', 'Regulation', 'Risk', 'Section', 'Standard', 'System',
'SystemOrProcess', 'Threat', 'Vendor')
self._assert_states(basic_state_objects, basic_states, 'Draft')
def test_audit_states(self):
"""Test states for Audit object"""
audit_states = ('Planned', 'In Progress', 'Manager Review',
'Ready for External Review', 'Completed')
self._assert_states(('Audit', ), audit_states, 'Planned')
def test_assignable_states(self):
"""Test states for Assignable objects (Assessment)"""
assignable_states = (
'In Progress', 'Completed', 'Not Started', 'Verified',
'Ready for Review')
self._assert_states(('Assessment', ), assignable_states, 'Not Started')
|
Add unit test for object state"""Test Object State Module"""
import unittest
import ggrc.app # noqa pylint: disable=unused-import
from ggrc.models import all_models
class TestStates(unittest.TestCase):
"""Test Object State main Test Case class"""
def _assert_states(self, models, expected_states, default):
# pylint: disable=no-self-use
for model in all_models.all_models:
if model.__name__ not in models:
continue
assert hasattr(model, "valid_statuses"), \
"{} does not have valid_statuses".format(model.__name__)
assert set(model.valid_statuses()) == set(expected_states), \
"{} does not have expected states {}. Current states {}".format(
model.__name__, ', '.join(expected_states),
', '.join(model.valid_statuses()))
assert model.default_status() == default, \
"{} does not have expected default status {}, but {} instead".format(
model.__name__,
default,
model.default_status())
def test_basic_states(self):
"""Test basic object states"""
basic_states = ('Draft', 'Active', 'Deprecated')
basic_state_objects = (
'AccessGroup', 'Clause', 'Contract',
'Control', 'DataAsset', 'Directive', 'Facility', 'Issue', 'Market',
'Objective', 'OrgGroup', 'Policy', 'Process', 'Product', 'Program',
'Project', 'Regulation', 'Risk', 'Section', 'Standard', 'System',
'SystemOrProcess', 'Threat', 'Vendor')
self._assert_states(basic_state_objects, basic_states, 'Draft')
def test_audit_states(self):
"""Test states for Audit object"""
audit_states = ('Planned', 'In Progress', 'Manager Review',
'Ready for External Review', 'Completed')
self._assert_states(('Audit', ), audit_states, 'Planned')
def test_assignable_states(self):
"""Test states for Assignable objects (Assessment)"""
assignable_states = (
'In Progress', 'Completed', 'Not Started', 'Verified',
'Ready for Review')
self._assert_states(('Assessment', ), assignable_states, 'Not Started')
|
<commit_before><commit_msg>Add unit test for object state<commit_after>"""Test Object State Module"""
import unittest
import ggrc.app # noqa pylint: disable=unused-import
from ggrc.models import all_models
class TestStates(unittest.TestCase):
"""Test Object State main Test Case class"""
def _assert_states(self, models, expected_states, default):
# pylint: disable=no-self-use
for model in all_models.all_models:
if model.__name__ not in models:
continue
assert hasattr(model, "valid_statuses"), \
"{} does not have valid_statuses".format(model.__name__)
assert set(model.valid_statuses()) == set(expected_states), \
"{} does not have expected states {}. Current states {}".format(
model.__name__, ', '.join(expected_states),
', '.join(model.valid_statuses()))
assert model.default_status() == default, \
"{} does not have expected default status {}, but {} instead".format(
model.__name__,
default,
model.default_status())
def test_basic_states(self):
"""Test basic object states"""
basic_states = ('Draft', 'Active', 'Deprecated')
basic_state_objects = (
'AccessGroup', 'Clause', 'Contract',
'Control', 'DataAsset', 'Directive', 'Facility', 'Issue', 'Market',
'Objective', 'OrgGroup', 'Policy', 'Process', 'Product', 'Program',
'Project', 'Regulation', 'Risk', 'Section', 'Standard', 'System',
'SystemOrProcess', 'Threat', 'Vendor')
self._assert_states(basic_state_objects, basic_states, 'Draft')
def test_audit_states(self):
"""Test states for Audit object"""
audit_states = ('Planned', 'In Progress', 'Manager Review',
'Ready for External Review', 'Completed')
self._assert_states(('Audit', ), audit_states, 'Planned')
def test_assignable_states(self):
"""Test states for Assignable objects (Assessment)"""
assignable_states = (
'In Progress', 'Completed', 'Not Started', 'Verified',
'Ready for Review')
self._assert_states(('Assessment', ), assignable_states, 'Not Started')
|
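The assertions above assume each model exposes valid_statuses() and default_status() as classmethods via a shared states mixin; a minimal sketch of that contract (hypothetical, the real GGRC mixin differs):
class BasicStatuses:
    VALID_STATUSES = ('Draft', 'Active', 'Deprecated')

    @classmethod
    def valid_statuses(cls):
        return cls.VALID_STATUSES

    @classmethod
    def default_status(cls):
        return cls.VALID_STATUSES[0]

class Control(BasicStatuses):
    pass

assert Control.default_status() == 'Draft'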
|
fadde85c5dac5509e1497aeaee97100cc9f5b3bd
|
tests/integration/test_with_ssl.py
|
tests/integration/test_with_ssl.py
|
from . import base
class SSLTestCase(base.IntegrationTestCase):
'''SSL security provider integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
Add integration tests for SSL security provider
|
Add integration tests for SSL security provider
|
Python
|
bsd-3-clause
|
rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective,rafaduran/python-mcollective
|
Add integration tests for SSL security provider
|
from . import base
class SSLTestCase(base.IntegrationTestCase):
'''SSL security provider integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
<commit_before><commit_msg>Add integration tests for SSL security provider<commit_after>
|
from . import base
class SSLTestCase(base.IntegrationTestCase):
'''SSL security provider integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
Add integration tests for SSL security providerfrom . import base
class SSLTestCase(base.IntegrationTestCase):
'''SSL security provider integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
<commit_before><commit_msg>Add integration tests for SSL security provider<commit_after>from . import base
class SSLTestCase(base.IntegrationTestCase):
'''SSL security provider integration test case.'''
CTXT = {
'plugin.activemq.pool.1.port': 61614,
'plugin.activemq.pool.1.password': 'marionette',
'plugin.ssl_server_public': 'tests/fixtures/server-public.pem',
'plugin.ssl_client_private': 'tests/fixtures/client-private.pem',
'plugin.ssl_client_public': 'tests/fixtures/client-public.pem',
}
class TestWithSSLMCo20x(base.MCollective20x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo22x(base.MCollective22x, SSLTestCase):
'''MCollective integration test case.'''
class TestWithSSLMCo23x(base.MCollective23x, SSLTestCase):
'''MCollective integration test case.'''
|
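The PEM fixtures named in CTXT can be regenerated with the cryptography package if needed (a sketch assuming a recent cryptography release; the repository may produce its fixtures differently):
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

key = rsa.generate_private_key(public_exponent=65537, key_size=2048)
with open('tests/fixtures/client-private.pem', 'wb') as f:
    f.write(key.private_bytes(
        serialization.Encoding.PEM,
        serialization.PrivateFormat.TraditionalOpenSSL,
        serialization.NoEncryption()))
with open('tests/fixtures/client-public.pem', 'wb') as f:
    f.write(key.public_key().public_bytes(
        serialization.Encoding.PEM,
        serialization.PublicFormat.SubjectPublicKeyInfo))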
|
5980d3bbff67d060a1a1b15372293ced972dbe8b
|
tests/test_pipelines.py
|
tests/test_pipelines.py
|
from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.trial import unittest
from scrapy import Spider, signals, Request
from scrapy.utils.test import get_crawler
from tests.mockserver import MockServer
class SimplePipeline:
def process_item(self, item, spider):
item['pipeline_passed'] = True
return item
class DeferredPipeline:
def cb(self, item):
item['pipeline_passed'] = True
return item
def process_item(self, item, spider):
d = Deferred()
d.addCallback(self.cb)
d.callback(item)
return d
class ItemSpider(Spider):
name = 'itemspider'
def start_requests(self):
yield Request(self.mockserver.url('/status?n=200'))
def parse(self, response):
return {'field': 42}
class PipelineTestCase(unittest.TestCase):
def setUp(self):
self.mockserver = MockServer()
self.mockserver.__enter__()
def tearDown(self):
self.mockserver.__exit__(None, None, None)
def _on_item_scraped(self, item):
self.assertIsInstance(item, dict)
self.assertTrue(item.get('pipeline_passed'))
self.items.append(item)
def _create_crawler(self, pipeline_class):
settings = {
'ITEM_PIPELINES': {__name__ + '.' + pipeline_class.__name__: 1},
}
crawler = get_crawler(ItemSpider, settings)
crawler.signals.connect(self._on_item_scraped, signals.item_scraped)
self.items = []
return crawler
@defer.inlineCallbacks
def test_simple_pipeline(self):
crawler = self._create_crawler(SimplePipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
@defer.inlineCallbacks
def test_deferred_pipeline(self):
crawler = self._create_crawler(DeferredPipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
|
Add simple tests for pipelines.
|
Add simple tests for pipelines.
|
Python
|
bsd-3-clause
|
pawelmhm/scrapy,scrapy/scrapy,pawelmhm/scrapy,scrapy/scrapy,elacuesta/scrapy,dangra/scrapy,eLRuLL/scrapy,eLRuLL/scrapy,elacuesta/scrapy,eLRuLL/scrapy,elacuesta/scrapy,pablohoffman/scrapy,starrify/scrapy,dangra/scrapy,pawelmhm/scrapy,starrify/scrapy,scrapy/scrapy,dangra/scrapy,pablohoffman/scrapy,pablohoffman/scrapy,starrify/scrapy
|
Add simple tests for pipelines.
|
from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.trial import unittest
from scrapy import Spider, signals, Request
from scrapy.utils.test import get_crawler
from tests.mockserver import MockServer
class SimplePipeline:
def process_item(self, item, spider):
item['pipeline_passed'] = True
return item
class DeferredPipeline:
def cb(self, item):
item['pipeline_passed'] = True
return item
def process_item(self, item, spider):
d = Deferred()
d.addCallback(self.cb)
d.callback(item)
return d
class ItemSpider(Spider):
name = 'itemspider'
def start_requests(self):
yield Request(self.mockserver.url('/status?n=200'))
def parse(self, response):
return {'field': 42}
class PipelineTestCase(unittest.TestCase):
def setUp(self):
self.mockserver = MockServer()
self.mockserver.__enter__()
def tearDown(self):
self.mockserver.__exit__(None, None, None)
def _on_item_scraped(self, item):
self.assertIsInstance(item, dict)
self.assertTrue(item.get('pipeline_passed'))
self.items.append(item)
def _create_crawler(self, pipeline_class):
settings = {
'ITEM_PIPELINES': {__name__ + '.' + pipeline_class.__name__: 1},
}
crawler = get_crawler(ItemSpider, settings)
crawler.signals.connect(self._on_item_scraped, signals.item_scraped)
self.items = []
return crawler
@defer.inlineCallbacks
def test_simple_pipeline(self):
crawler = self._create_crawler(SimplePipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
@defer.inlineCallbacks
def test_deferred_pipeline(self):
crawler = self._create_crawler(DeferredPipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
|
<commit_before><commit_msg>Add simple tests for pipelines.<commit_after>
|
from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.trial import unittest
from scrapy import Spider, signals, Request
from scrapy.utils.test import get_crawler
from tests.mockserver import MockServer
class SimplePipeline:
def process_item(self, item, spider):
item['pipeline_passed'] = True
return item
class DeferredPipeline:
def cb(self, item):
item['pipeline_passed'] = True
return item
def process_item(self, item, spider):
d = Deferred()
d.addCallback(self.cb)
d.callback(item)
return d
class ItemSpider(Spider):
name = 'itemspider'
def start_requests(self):
yield Request(self.mockserver.url('/status?n=200'))
def parse(self, response):
return {'field': 42}
class PipelineTestCase(unittest.TestCase):
def setUp(self):
self.mockserver = MockServer()
self.mockserver.__enter__()
def tearDown(self):
self.mockserver.__exit__(None, None, None)
def _on_item_scraped(self, item):
self.assertIsInstance(item, dict)
self.assertTrue(item.get('pipeline_passed'))
self.items.append(item)
def _create_crawler(self, pipeline_class):
settings = {
'ITEM_PIPELINES': {__name__ + '.' + pipeline_class.__name__: 1},
}
crawler = get_crawler(ItemSpider, settings)
crawler.signals.connect(self._on_item_scraped, signals.item_scraped)
self.items = []
return crawler
@defer.inlineCallbacks
def test_simple_pipeline(self):
crawler = self._create_crawler(SimplePipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
@defer.inlineCallbacks
def test_deferred_pipeline(self):
crawler = self._create_crawler(DeferredPipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
|
Add simple tests for pipelines.from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.trial import unittest
from scrapy import Spider, signals, Request
from scrapy.utils.test import get_crawler
from tests.mockserver import MockServer
class SimplePipeline:
def process_item(self, item, spider):
item['pipeline_passed'] = True
return item
class DeferredPipeline:
def cb(self, item):
item['pipeline_passed'] = True
return item
def process_item(self, item, spider):
d = Deferred()
d.addCallback(self.cb)
d.callback(item)
return d
class ItemSpider(Spider):
name = 'itemspider'
def start_requests(self):
yield Request(self.mockserver.url('/status?n=200'))
def parse(self, response):
return {'field': 42}
class PipelineTestCase(unittest.TestCase):
def setUp(self):
self.mockserver = MockServer()
self.mockserver.__enter__()
def tearDown(self):
self.mockserver.__exit__(None, None, None)
def _on_item_scraped(self, item):
self.assertIsInstance(item, dict)
self.assertTrue(item.get('pipeline_passed'))
self.items.append(item)
def _create_crawler(self, pipeline_class):
settings = {
'ITEM_PIPELINES': {__name__ + '.' + pipeline_class.__name__: 1},
}
crawler = get_crawler(ItemSpider, settings)
crawler.signals.connect(self._on_item_scraped, signals.item_scraped)
self.items = []
return crawler
@defer.inlineCallbacks
def test_simple_pipeline(self):
crawler = self._create_crawler(SimplePipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
@defer.inlineCallbacks
def test_deferred_pipeline(self):
crawler = self._create_crawler(DeferredPipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
|
<commit_before><commit_msg>Add simple tests for pipelines.<commit_after>from twisted.internet import defer
from twisted.internet.defer import Deferred
from twisted.trial import unittest
from scrapy import Spider, signals, Request
from scrapy.utils.test import get_crawler
from tests.mockserver import MockServer
class SimplePipeline:
def process_item(self, item, spider):
item['pipeline_passed'] = True
return item
class DeferredPipeline:
def cb(self, item):
item['pipeline_passed'] = True
return item
def process_item(self, item, spider):
d = Deferred()
d.addCallback(self.cb)
d.callback(item)
return d
class ItemSpider(Spider):
name = 'itemspider'
def start_requests(self):
yield Request(self.mockserver.url('/status?n=200'))
def parse(self, response):
return {'field': 42}
class PipelineTestCase(unittest.TestCase):
def setUp(self):
self.mockserver = MockServer()
self.mockserver.__enter__()
def tearDown(self):
self.mockserver.__exit__(None, None, None)
def _on_item_scraped(self, item):
self.assertIsInstance(item, dict)
self.assertTrue(item.get('pipeline_passed'))
self.items.append(item)
def _create_crawler(self, pipeline_class):
settings = {
'ITEM_PIPELINES': {__name__ + '.' + pipeline_class.__name__: 1},
}
crawler = get_crawler(ItemSpider, settings)
crawler.signals.connect(self._on_item_scraped, signals.item_scraped)
self.items = []
return crawler
@defer.inlineCallbacks
def test_simple_pipeline(self):
crawler = self._create_crawler(SimplePipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
@defer.inlineCallbacks
def test_deferred_pipeline(self):
crawler = self._create_crawler(DeferredPipeline)
yield crawler.crawl(mockserver=self.mockserver)
self.assertEqual(len(self.items), 1)
|
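DeferredPipeline fires its Deferred before returning it; Twisted's defer.succeed is the idiomatic shorthand for that already-fired pattern, so an equivalent pipeline could read (an alternative sketch, not part of the change above):
from twisted.internet import defer

class SucceedPipeline:
    def process_item(self, item, spider):
        def cb(it):
            it['pipeline_passed'] = True
            return it
        # defer.succeed returns an already-fired Deferred wrapping item
        return defer.succeed(item).addCallback(cb)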
|
c4f1a3dd38e83c799dcd505f81b9b14308331cb6
|
gpi/BNI2BART_Traj_GPI.py
|
gpi/BNI2BART_Traj_GPI.py
|
# Author: Ashley Anderson III <aganders3@gmail.com>
# Date: 2016-01-25 13:58
import numpy as np
import gpi
class ExternalNode(gpi.NodeAPI):
"""Transform coordinates from BNI conventions to BART conventions.
INPUT:
in - a numpy array of k-space coordinates in the BNI convention
i.e. (-0.5, 0.5), dimensions: [readouts, pts, xy(z)]
OUTPUT:
out - a numpy array of k-space coordinates in the BART convention
i.e. (-mtx/2, mtx/2), dimensions: [zyx, pts, readouts]
WIDGETS:
mtx - the resulting matrix size (assumed square/cubic)
"""
# initialize the UI - add widgets and input/output ports
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'mtx', val=128, min=1, max=1024)
# self.addWidget('DoubleSpinBox', 'bar', val=10, min=0, max=100)
# self.addWidget('PushButton', 'baz', toggle=True)
# self.addWidget('ExclusivePushButtons', 'qux',
# buttons=['Antoine', 'Colby', 'Trotter', 'Adair'], val=1)
# IO Ports
self.addInPort('in', 'NPYarray', ndim=3)
self.addOutPort('out', 'NPYarray', dtype=np.complex64, ndim=3)
# validate the data - runs immediately before compute
# your last chance to show/hide/edit widgets
# return 1 if the data is not valid - compute will not run
# return 0 if the data is valid - compute will run
def validate(self):
in_data = self.getData('in')
# TODO: make sure the input data is valid
# [your code here]
return 0
# process the input data, send it to the output port
# return 1 if the computation failed
# return 0 if the computation was successful
def compute(self):
coords = self.getData('in').copy()
mtx = self.getVal('mtx')
# just transpose first to reverse dimensions
coords = coords.T
# adjust by the matrix size
# TODO: account for "true resolution"
coords *= mtx
# reverse the xyz dimension
coords[:,...] = coords[::-1,...]
# pad the z-dimension with zeros if the trajectory is 2D
if coords.shape[0] == 2:
coords = np.pad(coords,
((0,1), (0,0), (0,0)),
mode='constant',
constant_values=0)
# if the trajectory is not 3D at this point, something has gone wrong
if coords.shape[0] != 3:
self.log.warn("The final dimensions of the input data must be 2 (xy), or 3 (xyz).")
return 1
self.setData('out', np.require(coords, dtype=np.complex64))
return 0
|
Add node to convert BNI trajectories (e.g. from SpiralCoords) to BART trajectories.
|
Add node to convert BNI trajectories (e.g. from SpiralCoords) to BART trajectories.
|
Python
|
bsd-3-clause
|
nckz/bart,nckz/bart,nckz/bart,nckz/bart,nckz/bart
|
Add node to convert BNI trajectories (e.g. from SpiralCoords) to BART trajectories.
|
# Author: Ashley Anderson III <aganders3@gmail.com>
# Date: 2016-01-25 13:58
import numpy as np
import gpi
class ExternalNode(gpi.NodeAPI):
"""Transform coordinates from BNI conventions to BART conventions.
INPUT:
in - a numpy array of k-space coordinates in the BNI convention
i.e. (-0.5, 0.5), dimensions: [readouts, pts, xy(z)]
OUTPUT:
out - a numpy array of k-space coordinates in the BART convention
i.e. (-mtx/2, mtx/2), dimensions: [zyx, pts, readouts]
WIDGETS:
mtx - the resulting matrix size (assumed square/cubic)
"""
# initialize the UI - add widgets and input/output ports
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'mtx', val=128, min=1, max=1024)
# self.addWidget('DoubleSpinBox', 'bar', val=10, min=0, max=100)
# self.addWidget('PushButton', 'baz', toggle=True)
# self.addWidget('ExclusivePushButtons', 'qux',
# buttons=['Antoine', 'Colby', 'Trotter', 'Adair'], val=1)
# IO Ports
self.addInPort('in', 'NPYarray', ndim=3)
self.addOutPort('out', 'NPYarray', dtype=np.complex64, ndim=3)
# validate the data - runs immediately before compute
# your last chance to show/hide/edit widgets
# return 1 if the data is not valid - compute will not run
# return 0 if the data is valid - compute will run
def validate(self):
in_data = self.getData('in')
# TODO: make sure the input data is valid
# [your code here]
return 0
# process the input data, send it to the output port
# return 1 if the computation failed
# return 0 if the computation was successful
def compute(self):
coords = self.getData('in').copy()
mtx = self.getVal('mtx')
# just transpose first to reverse dimensions
coords = coords.T
# adjust by the matrix size
# TODO: account for "true resolution"
coords *= mtx
# reverse the xyz dimension
coords[:,...] = coords[::-1,...]
# pad the z-dimension with zeros if the trajectory is 2D
if coords.shape[0] == 2:
coords = np.pad(coords,
((0,1), (0,0), (0,0)),
mode='constant',
constant_values=0)
# if the trajectory is not 3D at this point, something has gone wrong
if coords.shape[0] != 3:
self.log.warn("The final dimensions of the input data must be 2 (xy), or 3 (xyz).")
return 1
self.setData('out', np.require(coords, dtype=np.complex64))
return 0
|
<commit_before><commit_msg>Add node to convert BNI trajectories (e.g. from SpiralCoords) to BART trajectories.<commit_after>
|
# Author: Ashley Anderson III <aganders3@gmail.com>
# Date: 2016-01-25 13:58
import numpy as np
import gpi
class ExternalNode(gpi.NodeAPI):
"""Transform coordinates from BNI conventions to BART conventions.
INPUT:
in - a numpy array of k-space coordinates in the BNI convention
i.e. (-0.5, 0.5), dimensions: [readouts, pts, xy(z)]
OUTPUT:
out - a numpy array of k-space coordinates in the BART convention
i.e. (-mtx/2, mtx/2), dimensions: [zyx, pts, readouts]
WIDGETS:
mtx - the resulting matrix size (assumed square/cubic)
"""
# initialize the UI - add widgets and input/output ports
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'mtx', val=128, min=1, max=1024)
# self.addWidget('DoubleSpinBox', 'bar', val=10, min=0, max=100)
# self.addWidget('PushButton', 'baz', toggle=True)
# self.addWidget('ExclusivePushButtons', 'qux',
# buttons=['Antoine', 'Colby', 'Trotter', 'Adair'], val=1)
# IO Ports
self.addInPort('in', 'NPYarray', ndim=3)
self.addOutPort('out', 'NPYarray', dtype=np.complex64, ndim=3)
# validate the data - runs immediately before compute
# your last chance to show/hide/edit widgets
# return 1 if the data is not valid - compute will not run
# return 0 if the data is valid - compute will run
def validate(self):
in_data = self.getData('in')
# TODO: make sure the input data is valid
# [your code here]
return 0
# process the input data, send it to the output port
# return 1 if the computation failed
# return 0 if the computation was successful
def compute(self):
coords = self.getData('in').copy()
mtx = self.getVal('mtx')
# just transpose first to reverse dimensions
coords = coords.T
# adjust by the matrix size
# TODO: account for "true resolution"
coords *= mtx
# reverse the xyz dimension
coords[:,...] = coords[::-1,...]
# pad the z-dimension with zeros if the trajectory is 2D
if coords.shape[0] == 2:
coords = np.pad(coords,
((0,1), (0,0), (0,0)),
mode='constant',
constant_values=0)
# if the trajectory is not 3D at this point, something has gone wrong
if coords.shape[0] != 3:
self.log.warn("The final dimensions of the input data must be 2 (xy), or 3 (xyz).")
return 1
self.setData('out', np.require(coords, dtype=np.complex64))
return 0
|
Add node to convert BNI trajectories (e.g. from SpiralCoords) to BART trajectories.# Author: Ashley Anderson III <aganders3@gmail.com>
# Date: 2016-01-25 13:58
import numpy as np
import gpi
class ExternalNode(gpi.NodeAPI):
"""Transform coordinates from BNI conventions to BART conventions.
INPUT:
    in - a numpy array of k-space coordinates in the BNI convention
i.e. (-0.5, 0.5), dimensions: [readouts, pts, xy(z)]
OUTPUT:
out - a numpy array of k-space coordinates in the BART convention
i.e. (-mtx/2, mtx/2), dimensions: [zyx, pts, readouts]
WIDGETS:
mtx - the resulting matrix size (assumed square/cubic)
"""
# initialize the UI - add widgets and input/output ports
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'mtx', val=128, min=1, max=1024)
# self.addWidget('DoubleSpinBox', 'bar', val=10, min=0, max=100)
# self.addWidget('PushButton', 'baz', toggle=True)
# self.addWidget('ExclusivePushButtons', 'qux',
# buttons=['Antoine', 'Colby', 'Trotter', 'Adair'], val=1)
# IO Ports
self.addInPort('in', 'NPYarray', ndim=3)
self.addOutPort('out', 'NPYarray', dtype=np.complex64, ndim=3)
# validate the data - runs immediately before compute
# your last chance to show/hide/edit widgets
# return 1 if the data is not valid - compute will not run
# return 0 if the data is valid - compute will run
def validate(self):
in_data = self.getData('in')
# TODO: make sure the input data is valid
# [your code here]
return 0
# process the input data, send it to the output port
# return 1 if the computation failed
# return 0 if the computation was successful
def compute(self):
coords = self.getData('in').copy()
mtx = self.getVal('mtx')
# just transpose first to reverse dimensions
coords = coords.T
# adjust by the matrix size
# TODO: account for "true resolution"
coords *= mtx
# reverse the xyz dimension
coords[:,...] = coords[::-1,...]
# pad the z-dimension with zeros if the trajectory is 2D
if coords.shape[0] == 2:
coords = np.pad(coords,
((0,1), (0,0), (0,0)),
mode='constant',
constant_values=0)
# if the trajectory is not 3D at this point, something has gone wrong
if coords.shape[0] != 3:
self.log.warn("The final dimensions of the input data must be 2 (xy), or 3 (xyz).")
return 1
self.setData('out', np.require(coords, dtype=np.complex64))
return 0
|
<commit_before><commit_msg>Add node to convert BNI trajectories (e.g. from SpiralCoords) to BART trajectories.<commit_after># Author: Ashley Anderson III <aganders3@gmail.com>
# Date: 2016-01-25 13:58
import numpy as np
import gpi
class ExternalNode(gpi.NodeAPI):
"""Transform coordinates from BNI conventions to BART conventions.
INPUT:
    in - a numpy array of k-space coordinates in the BNI convention
i.e. (-0.5, 0.5), dimensions: [readouts, pts, xy(z)]
OUTPUT:
out - a numpy array of k-space coordinates in the BART convention
i.e. (-mtx/2, mtx/2), dimensions: [zyx, pts, readouts]
WIDGETS:
mtx - the resulting matrix size (assumed square/cubic)
"""
# initialize the UI - add widgets and input/output ports
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'mtx', val=128, min=1, max=1024)
# self.addWidget('DoubleSpinBox', 'bar', val=10, min=0, max=100)
# self.addWidget('PushButton', 'baz', toggle=True)
# self.addWidget('ExclusivePushButtons', 'qux',
# buttons=['Antoine', 'Colby', 'Trotter', 'Adair'], val=1)
# IO Ports
self.addInPort('in', 'NPYarray', ndim=3)
self.addOutPort('out', 'NPYarray', dtype=np.complex64, ndim=3)
# validate the data - runs immediately before compute
# your last chance to show/hide/edit widgets
# return 1 if the data is not valid - compute will not run
# return 0 if the data is valid - compute will run
def validate(self):
in_data = self.getData('in')
# TODO: make sure the input data is valid
# [your code here]
return 0
# process the input data, send it to the output port
# return 1 if the computation failed
# return 0 if the computation was successful
def compute(self):
coords = self.getData('in').copy()
mtx = self.getVal('mtx')
# just transpose first to reverse dimensions
coords = coords.T
# adjust by the matrix size
# TODO: account for "true resolution"
coords *= mtx
# reverse the xyz dimension
coords[:,...] = coords[::-1,...]
# pad the z-dimension with zeros if the trajectory is 2D
if coords.shape[0] == 2:
coords = np.pad(coords,
((0,1), (0,0), (0,0)),
mode='constant',
constant_values=0)
# if the trajectory is not 3D at this point, something has gone wrong
if coords.shape[0] != 3:
self.log.warn("The final dimensions of the input data must be 2 (xy), or 3 (xyz).")
return 1
self.setData('out', np.require(coords, dtype=np.complex64))
return 0
|
|
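A minimal standalone sketch of the transform the node above performs, using plain NumPy on a hypothetical 2D trajectory (the array shapes and matrix size are illustrative, not taken from the record):

import numpy as np

mtx = 128                                    # illustrative matrix size
# hypothetical BNI-style coords: [readouts, pts, xy], values in (-0.5, 0.5)
coords = np.random.uniform(-0.5, 0.5, size=(16, 256, 2))
coords = coords.T                            # reverse dims -> [xy, pts, readouts]
coords = coords * mtx                        # scale to (-mtx/2, mtx/2)
coords = coords[::-1, ...]                   # reverse the spatial axis order
if coords.shape[0] == 2:                     # pad z with zeros for 2D input
    coords = np.pad(coords, ((0, 1), (0, 0), (0, 0)), mode='constant')
assert coords.shape == (3, 256, 16)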
8e31bc9e750ac0f6b9bf4fd816ad3270e2f46d90
|
teamworkApp/lib/dbCalls.py
|
teamworkApp/lib/dbCalls.py
|
# muddersOnRails()
# Sara McAllister November 5, 2-17
# Last updated: 11-5-2017
# library for SQLite database calls for teamwork analysis app
import contextlib
import sqlite3
DB = 'db/development.sqlite3'
def connect(sqlite_file):
""" Make connection to an SQLite database file """
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
return conn, c
def close(conn):
""" Commit changes and close connection to the database """
conn.commit()
conn.close()
@contextlib.contextmanager
def dbconnect(sqlite_file=DB):
conn, cursor = connect(sqlite_file)
try:
yield cursor
finally:
close(conn)
def getAllStyles():
"""Get all style entries in db ordered based on entry in db"""
with dbconnect() as cursor:
scores = cursor.execute('SELECT * FROM styles').fetchall()
return scores
|
Set up contextmanager for db calls
|
Set up contextmanager for db calls
|
Python
|
mit
|
nathanljustin/teamwork-analysis,nathanljustin/teamwork-analysis,nathanljustin/teamwork-analysis,nathanljustin/teamwork-analysis
|
Set up contextmanager for db calls
|
# muddersOnRails()
# Sara McAllister November 5, 2017
# Last updated: 11-5-2017
# library for SQLite database calls for teamwork analysis app
import contextlib
import sqlite3
DB = 'db/development.sqlite3'
def connect(sqlite_file):
""" Make connection to an SQLite database file """
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
return conn, c
def close(conn):
""" Commit changes and close connection to the database """
conn.commit()
conn.close()
@contextlib.contextmanager
def dbconnect(sqlite_file=DB):
conn, cursor = connect(sqlite_file)
try:
yield cursor
finally:
close(conn)
def getAllStyles():
"""Get all style entries in db ordered based on entry in db"""
with dbconnect() as cursor:
scores = cursor.execute('SELECT * FROM styles').fetchall()
return scores
|
<commit_before><commit_msg>Set up contextmanager for db calls<commit_after>
|
# muddersOnRails()
# Sara McAllister November 5, 2017
# Last updated: 11-5-2017
# library for SQLite database calls for teamwork analysis app
import contextlib
import sqlite3
DB = 'db/development.sqlite3'
def connect(sqlite_file):
""" Make connection to an SQLite database file """
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
return conn, c
def close(conn):
""" Commit changes and close connection to the database """
conn.commit()
conn.close()
@contextlib.contextmanager
def dbconnect(sqlite_file=DB):
conn, cursor = connect(sqlite_file)
try:
yield cursor
finally:
close(conn)
def getAllStyles():
"""Get all style entries in db ordered based on entry in db"""
with dbconnect() as cursor:
scores = cursor.execute('SELECT * FROM styles').fetchall()
return scores
|
Set up contextmanager for db calls# muddersOnRails()
# Sara McAllister November 5, 2017
# Last updated: 11-5-2017
# library for SQLite database calls for teamwork analysis app
import contextlib
import sqlite3
DB = 'db/development.sqlite3'
def connect(sqlite_file):
""" Make connection to an SQLite database file """
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
return conn, c
def close(conn):
""" Commit changes and close connection to the database """
conn.commit()
conn.close()
@contextlib.contextmanager
def dbconnect(sqlite_file=DB):
conn, cursor = connect(sqlite_file)
try:
yield cursor
finally:
close(conn)
def getAllStyles():
"""Get all style entries in db ordered based on entry in db"""
with dbconnect() as cursor:
scores = cursor.execute('SELECT * FROM styles').fetchall()
return scores
|
<commit_before><commit_msg>Set up contextmanager for db calls<commit_after># muddersOnRails()
# Sara McAllister November 5, 2017
# Last updated: 11-5-2017
# library for SQLite database calls for teamwork analysis app
import contextlib
import sqlite3
DB = 'db/development.sqlite3'
def connect(sqlite_file):
""" Make connection to an SQLite database file """
conn = sqlite3.connect(sqlite_file)
c = conn.cursor()
return conn, c
def close(conn):
""" Commit changes and close connection to the database """
conn.commit()
conn.close()
@contextlib.contextmanager
def dbconnect(sqlite_file=DB):
conn, cursor = connect(sqlite_file)
try:
yield cursor
finally:
close(conn)
def getAllStyles():
"""Get all style entries in db ordered based on entry in db"""
with dbconnect() as cursor:
scores = cursor.execute('SELECT * FROM styles').fetchall()
return scores
|
|
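One design choice worth noting in the record above: dbconnect commits in its finally block, so writes are committed even when the body raises. A hedged sketch of a variant that commits only on success and rolls back on error (standalone, same stdlib modules):

import contextlib
import sqlite3

@contextlib.contextmanager
def dbconnect_safe(sqlite_file='db/development.sqlite3'):
    conn = sqlite3.connect(sqlite_file)
    try:
        yield conn.cursor()
        conn.commit()        # commit only if the body succeeded
    except Exception:
        conn.rollback()      # undo partial writes on error
        raise
    finally:
        conn.close()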
66cdb36231ff1192a8a2e6b15c4b8d524cfbff6d
|
powerline/renderers/pango_markup.py
|
powerline/renderers/pango_markup.py
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xmlrpclib import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xml.sax.saxutils import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
|
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
The latter is not available in Python 3
|
Python
|
mit
|
dragon788/powerline,xxxhycl2010/powerline,cyrixhero/powerline,s0undt3ch/powerline,S0lll0s/powerline,EricSB/powerline,prvnkumar/powerline,blindFS/powerline,QuLogic/powerline,lukw00/powerline,dragon788/powerline,firebitsbr/powerline,Liangjianghao/powerline,DoctorJellyface/powerline,wfscheper/powerline,wfscheper/powerline,QuLogic/powerline,magus424/powerline,DoctorJellyface/powerline,junix/powerline,keelerm84/powerline,Luffin/powerline,xxxhycl2010/powerline,seanfisk/powerline,blindFS/powerline,areteix/powerline,darac/powerline,s0undt3ch/powerline,junix/powerline,seanfisk/powerline,cyrixhero/powerline,bezhermoso/powerline,xfumihiro/powerline,EricSB/powerline,xxxhycl2010/powerline,DoctorJellyface/powerline,lukw00/powerline,magus424/powerline,s0undt3ch/powerline,russellb/powerline,bartvm/powerline,areteix/powerline,Liangjianghao/powerline,wfscheper/powerline,S0lll0s/powerline,seanfisk/powerline,IvanAli/powerline,junix/powerline,bartvm/powerline,bezhermoso/powerline,EricSB/powerline,QuLogic/powerline,xfumihiro/powerline,Luffin/powerline,firebitsbr/powerline,russellb/powerline,bartvm/powerline,kenrachynski/powerline,bezhermoso/powerline,prvnkumar/powerline,Liangjianghao/powerline,darac/powerline,darac/powerline,kenrachynski/powerline,lukw00/powerline,prvnkumar/powerline,cyrixhero/powerline,magus424/powerline,IvanAli/powerline,areteix/powerline,Luffin/powerline,firebitsbr/powerline,keelerm84/powerline,russellb/powerline,S0lll0s/powerline,xfumihiro/powerline,blindFS/powerline,dragon788/powerline,IvanAli/powerline,kenrachynski/powerline
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xmlrpclib import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
The latter is not available in Python 3
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xml.sax.saxutils import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
<commit_before># vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xmlrpclib import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
<commit_msg>Use xml.sax.saxutils.escape in place of xmlrpclib.escape
The latter is not available in Python 3<commit_after>
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xml.sax.saxutils import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xmlrpclib import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
Use xml.sax.saxutils.escape in place of xmlrpclib.escape
The latter is not available in Python 3# vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xml.sax.saxutils import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
<commit_before># vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xmlrpclib import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
<commit_msg>Use xml.sax.saxutils.escape in place of xmlrpclib.escape
The latter is not available in Python 3<commit_after># vim:fileencoding=utf-8:noet
from powerline.renderer import Renderer
from powerline.colorscheme import ATTR_BOLD, ATTR_ITALIC, ATTR_UNDERLINE
from xml.sax.saxutils import escape as _escape
class PangoMarkupRenderer(Renderer):
'''Powerline Pango markup segment renderer.'''
@staticmethod
def hlstyle(*args, **kwargs):
# We don't need to explicitly reset attributes, so skip those calls
return ''
def hl(self, contents, fg=None, bg=None, attr=None):
'''Highlight a segment.'''
awesome_attr = []
if fg is not None:
if fg is not False and fg[1] is not False:
awesome_attr += ['foreground="#{0:06x}"'.format(fg[1])]
if bg is not None:
if bg is not False and bg[1] is not False:
awesome_attr += ['background="#{0:06x}"'.format(bg[1])]
if attr is not None and attr is not False:
if attr & ATTR_BOLD:
awesome_attr += ['font_weight="bold"']
if attr & ATTR_ITALIC:
awesome_attr += ['font_style="italic"']
if attr & ATTR_UNDERLINE:
awesome_attr += ['underline="single"']
return '<span ' + ' '.join(awesome_attr) + '>' + contents + '</span>'
escape = staticmethod(_escape)
renderer = PangoMarkupRenderer
|
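The renderer above depends on escaping segment text before wrapping it in a span; a minimal illustration of why xml.sax.saxutils.escape matters for Pango markup (the colour and text are made up):

from xml.sax.saxutils import escape

contents = escape('ls & pwd <done>')
span = '<span foreground="#{0:06x}" font_weight="bold">{1}</span>'.format(0xd0d0d0, contents)
print(span)
# <span foreground="#d0d0d0" font_weight="bold">ls &amp; pwd &lt;done&gt;</span>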
2cc323c72d296c3264ed90f3f404c1e9d04f5575
|
ci/management/commands/update_event.py
|
ci/management/commands/update_event.py
|
from django.core.management.base import BaseCommand
from ci import models
from optparse import make_option
import json, random
from ci import event
from django.conf import settings
from django.core.urlresolvers import reverse
import requests
def get_rand():
return str(random.randint(1, 10000000000))
def do_post(json_data, ev, base_url):
out_json = json.dumps(json_data, separators=(',', ': '))
server = ev.base.server()
url = ""
if server.host_type == settings.GITSERVER_GITHUB:
url = reverse('ci:github:webhook', args=[ev.build_user.build_key])
elif server.host_type == settings.GITSERVER_GITLAB:
url = reverse('ci:gitlab:webhook', args=[ev.build_user.build_key])
url = "%s%s" % (base_url, url)
print("Posting to URL: %s" % url)
response = requests.post(url, out_json)
response.raise_for_status()
class Command(BaseCommand):
help = 'TESTING ONLY! Grab the event, take the JSON data and change the SHA then post it again to get a new event.'
option_list = BaseCommand.option_list + (
make_option('--pk', dest='pk', type='int', help='The event to update'),
make_option('--url', dest='url', type='str', help='The Civet base URL'),
)
def handle(self, *args, **options):
ev_pk = options.get('pk')
url = options.get('url')
if not url or not ev_pk:
print("Missing arguments!")
return
ev = models.Event.objects.get(pk=ev_pk)
print("Updating event: %s" % ev)
settings.REMOTE_UPDATE = False
settings.INSTALL_WEBHOOK = False
json_data = json.loads(ev.json_data)
if ev.cause == ev.PULL_REQUEST:
json_data["pull_request"]["head"]["sha"] = get_rand()
do_post(json_data, ev, url)
elif ev.PUSH:
json_data["after"] = get_rand()
do_post(json_data, ev, url)
elif ev.MANUAL:
me = event.ManualEvent(ev.build_user, ev.branch, get_rand())
me.save()
|
Add a small utility for testing to update an event
|
Add a small utility for testing to update an event
|
Python
|
apache-2.0
|
idaholab/civet,brianmoose/civet,idaholab/civet,idaholab/civet,brianmoose/civet,brianmoose/civet,brianmoose/civet,idaholab/civet
|
Add a small utility for testing to update an event
|
from django.core.management.base import BaseCommand
from ci import models
from optparse import make_option
import json, random
from ci import event
from django.conf import settings
from django.core.urlresolvers import reverse
import requests
def get_rand():
return str(random.randint(1, 10000000000))
def do_post(json_data, ev, base_url):
out_json = json.dumps(json_data, separators=(',', ': '))
server = ev.base.server()
url = ""
if server.host_type == settings.GITSERVER_GITHUB:
url = reverse('ci:github:webhook', args=[ev.build_user.build_key])
elif server.host_type == settings.GITSERVER_GITLAB:
url = reverse('ci:gitlab:webhook', args=[ev.build_user.build_key])
url = "%s%s" % (base_url, url)
print("Posting to URL: %s" % url)
response = requests.post(url, out_json)
response.raise_for_status()
class Command(BaseCommand):
help = 'TESTING ONLY! Grab the event, take the JSON data and change the SHA then post it again to get a new event.'
option_list = BaseCommand.option_list + (
make_option('--pk', dest='pk', type='int', help='The event to update'),
make_option('--url', dest='url', type='str', help='The Civet base URL'),
)
def handle(self, *args, **options):
ev_pk = options.get('pk')
url = options.get('url')
if not url or not ev_pk:
print("Missing arguments!")
return
ev = models.Event.objects.get(pk=ev_pk)
print("Updating event: %s" % ev)
settings.REMOTE_UPDATE = False
settings.INSTALL_WEBHOOK = False
json_data = json.loads(ev.json_data)
if ev.cause == ev.PULL_REQUEST:
json_data["pull_request"]["head"]["sha"] = get_rand()
do_post(json_data, ev, url)
elif ev.PUSH:
json_data["after"] = get_rand()
do_post(json_data, ev, url)
elif ev.MANUAL:
me = event.ManualEvent(ev.build_user, ev.branch, get_rand())
me.save()
|
<commit_before><commit_msg>Add a small utility for testing to update an event<commit_after>
|
from django.core.management.base import BaseCommand
from ci import models
from optparse import make_option
import json, random
from ci import event
from django.conf import settings
from django.core.urlresolvers import reverse
import requests
def get_rand():
return str(random.randint(1, 10000000000))
def do_post(json_data, ev, base_url):
out_json = json.dumps(json_data, separators=(',', ': '))
server = ev.base.server()
url = ""
if server.host_type == settings.GITSERVER_GITHUB:
url = reverse('ci:github:webhook', args=[ev.build_user.build_key])
elif server.host_type == settings.GITSERVER_GITLAB:
url = reverse('ci:gitlab:webhook', args=[ev.build_user.build_key])
url = "%s%s" % (base_url, url)
print("Posting to URL: %s" % url)
response = requests.post(url, out_json)
response.raise_for_status()
class Command(BaseCommand):
help = 'TESTING ONLY! Grab the event, take the JSON data and change the SHA then post it again to get a new event.'
option_list = BaseCommand.option_list + (
make_option('--pk', dest='pk', type='int', help='The event to update'),
make_option('--url', dest='url', type='str', help='The Civet base URL'),
)
def handle(self, *args, **options):
ev_pk = options.get('pk')
url = options.get('url')
if not url or not ev_pk:
print("Missing arguments!")
return
ev = models.Event.objects.get(pk=ev_pk)
print("Updating event: %s" % ev)
settings.REMOTE_UPDATE = False
settings.INSTALL_WEBHOOK = False
json_data = json.loads(ev.json_data)
if ev.cause == ev.PULL_REQUEST:
json_data["pull_request"]["head"]["sha"] = get_rand()
do_post(json_data, ev, url)
elif ev.PUSH:
json_data["after"] = get_rand()
do_post(json_data, ev, url)
elif ev.MANUAL:
me = event.ManualEvent(ev.build_user, ev.branch, get_rand())
me.save()
|
Add a small utility for testing to update an eventfrom django.core.management.base import BaseCommand
from ci import models
from optparse import make_option
import json, random
from ci import event
from django.conf import settings
from django.core.urlresolvers import reverse
import requests
def get_rand():
return str(random.randint(1, 10000000000))
def do_post(json_data, ev, base_url):
out_json = json.dumps(json_data, separators=(',', ': '))
server = ev.base.server()
url = ""
if server.host_type == settings.GITSERVER_GITHUB:
url = reverse('ci:github:webhook', args=[ev.build_user.build_key])
elif server.host_type == settings.GITSERVER_GITLAB:
url = reverse('ci:gitlab:webhook', args=[ev.build_user.build_key])
url = "%s%s" % (base_url, url)
print("Posting to URL: %s" % url)
response = requests.post(url, out_json)
response.raise_for_status()
class Command(BaseCommand):
help = 'TESTING ONLY! Grab the event, take the JSON data and change the SHA then post it again to get a new event.'
option_list = BaseCommand.option_list + (
make_option('--pk', dest='pk', type='int', help='The event to update'),
make_option('--url', dest='url', type='str', help='The Civet base URL'),
)
def handle(self, *args, **options):
ev_pk = options.get('pk')
url = options.get('url')
if not url or not ev_pk:
print("Missing arguments!")
return
ev = models.Event.objects.get(pk=ev_pk)
print("Updating event: %s" % ev)
settings.REMOTE_UPDATE = False
settings.INSTALL_WEBHOOK = False
json_data = json.loads(ev.json_data)
if ev.cause == ev.PULL_REQUEST:
json_data["pull_request"]["head"]["sha"] = get_rand()
do_post(json_data, ev, url)
elif ev.PUSH:
json_data["after"] = get_rand()
do_post(json_data, ev, url)
elif ev.MANUAL:
me = event.ManualEvent(ev.build_user, ev.branch, get_rand())
me.save()
|
<commit_before><commit_msg>Add a small utility for testing to update an event<commit_after>from django.core.management.base import BaseCommand
from ci import models
from optparse import make_option
import json, random
from ci import event
from django.conf import settings
from django.core.urlresolvers import reverse
import requests
def get_rand():
return str(random.randint(1, 10000000000))
def do_post(json_data, ev, base_url):
out_json = json.dumps(json_data, separators=(',', ': '))
server = ev.base.server()
url = ""
if server.host_type == settings.GITSERVER_GITHUB:
url = reverse('ci:github:webhook', args=[ev.build_user.build_key])
elif server.host_type == settings.GITSERVER_GITLAB:
url = reverse('ci:gitlab:webhook', args=[ev.build_user.build_key])
url = "%s%s" % (base_url, url)
print("Posting to URL: %s" % url)
response = requests.post(url, out_json)
response.raise_for_status()
class Command(BaseCommand):
help = 'TESTING ONLY! Grab the event, take the JSON data and change the SHA then post it again to get a new event.'
option_list = BaseCommand.option_list + (
make_option('--pk', dest='pk', type='int', help='The event to update'),
make_option('--url', dest='url', type='str', help='The Civet base URL'),
)
def handle(self, *args, **options):
ev_pk = options.get('pk')
url = options.get('url')
if not url or not ev_pk:
print("Missing arguments!")
return
ev = models.Event.objects.get(pk=ev_pk)
print("Updating event: %s" % ev)
settings.REMOTE_UPDATE = False
settings.INSTALL_WEBHOOK = False
json_data = json.loads(ev.json_data)
if ev.cause == ev.PULL_REQUEST:
json_data["pull_request"]["head"]["sha"] = get_rand()
do_post(json_data, ev, url)
elif ev.PUSH:
json_data["after"] = get_rand()
do_post(json_data, ev, url)
elif ev.MANUAL:
me = event.ManualEvent(ev.build_user, ev.branch, get_rand())
me.save()
|
|
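A hedged sketch of driving the management command above from a Django shell rather than the CLI; the pk and URL are placeholders, and this assumes the command file lives in an installed app's management/commands directory:

from django.core.management import call_command

# equivalent to: python manage.py update_event --pk 123 --url http://localhost:8000
call_command('update_event', pk=123, url='http://localhost:8000')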
11c8b8c7e7e6a57148533ec6f9af138c4164d518
|
performance/us/kbase/workspace/performance/workspace/get_from_shock.py
|
performance/us/kbase/workspace/performance/workspace/get_from_shock.py
|
#!/usr/bin/env python
import requests
import time
import sys
from pymongo.mongo_client import MongoClient
MONGO_DB = 'ws_test'
SHOCK_HOST = 'http://localhost:7044'
def main():
token = sys.argv[1]
mcli = MongoClient()
db = mcli[MONGO_DB]
ws = -1
md5s = []
for ver in db.workspaceObjVersions.find().sort([('ws', 1), ('id', 1)]):
if ws == -1:
ws = ver['ws']
else:
if ws != ver['ws']:
raise ValueError('more than one workspace found')
md5s.append(ver['chksum'])
nodes = []
for md5 in md5s:
rec = db.shock_nodeMap.find_one({'chksum': md5})
nodes.append(rec['node'])
print nodes
headers = {'Authorization': 'OAuth ' + token}
times = []
count = 1
for node in nodes:
now = time.clock()
ret = requests.get(SHOCK_HOST + '/node/' + node + '/?download', headers=headers).text
times.append(time.clock() - now)
print count
count += 1
print 'N: ' + str(len(times))
mean = sum(times) / float(len(times))
print 'Mean: ' + str(mean)
ss = sum((x - mean)**2 for x in times)
print 'Stddev (sample): ' + str((ss/float(len(times) - 1))**0.5)
if __name__ == '__main__':
main()
|
Add small script for calculating shock retrieval times
|
Add small script for calculating shock retrieval times
|
Python
|
mit
|
kbase/workspace_deluxe,MrCreosote/workspace_deluxe,kbase/workspace_deluxe,MrCreosote/workspace_deluxe,MrCreosote/workspace_deluxe,kbase/workspace_deluxe,MrCreosote/workspace_deluxe,kbase/workspace_deluxe,kbase/workspace_deluxe,kbase/workspace_deluxe,kbase/workspace_deluxe,MrCreosote/workspace_deluxe,MrCreosote/workspace_deluxe,MrCreosote/workspace_deluxe
|
Add small script for calculating shock retrieval times
|
#!/usr/bin/env python
import requests
import time
import sys
from pymongo.mongo_client import MongoClient
MONGO_DB = 'ws_test'
SHOCK_HOST = 'http://localhost:7044'
def main():
token = sys.argv[1]
mcli = MongoClient()
db = mcli[MONGO_DB]
ws = -1
md5s = []
for ver in db.workspaceObjVersions.find().sort([('ws', 1), ('id', 1)]):
if ws == -1:
ws = ver['ws']
else:
if ws != ver['ws']:
raise ValueError('more than one workspace found')
md5s.append(ver['chksum'])
nodes = []
for md5 in md5s:
rec = db.shock_nodeMap.find_one({'chksum': md5})
nodes.append(rec['node'])
print nodes
headers = {'Authorization': 'OAuth ' + token}
times = []
count = 1
for node in nodes:
now = time.clock()
ret = requests.get(SHOCK_HOST + '/node/' + node + '/?download', headers=headers).text
times.append(time.clock() - now)
print count
count += 1
print 'N: ' + str(len(times))
mean = sum(times) / float(len(times))
print 'Mean: ' + str(mean)
ss = sum((x - mean)**2 for x in times)
print 'Stddev (sample): ' + str((ss/float(len(times) - 1))**0.5)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add small script for calculating shock retrieval times<commit_after>
|
#!/usr/bin/env python
import requests
import time
import sys
from pymongo.mongo_client import MongoClient
MONGO_DB = 'ws_test'
SHOCK_HOST = 'http://localhost:7044'
def main():
token = sys.argv[1]
mcli = MongoClient()
db = mcli[MONGO_DB]
ws = -1
md5s = []
for ver in db.workspaceObjVersions.find().sort([('ws', 1), ('id', 1)]):
if ws == -1:
ws = ver['ws']
else:
if ws != ver['ws']:
raise ValueError('more than one workspace found')
md5s.append(ver['chksum'])
nodes = []
for md5 in md5s:
rec = db.shock_nodeMap.find_one({'chksum': md5})
nodes.append(rec['node'])
print nodes
headers = {'Authorization': 'OAuth ' + token}
times = []
count = 1
for node in nodes:
now = time.clock()
ret = requests.get(SHOCK_HOST + '/node/' + node + '/?download', headers=headers).text
times.append(time.clock() - now)
print count
count += 1
print 'N: ' + str(len(times))
mean = sum(times) / float(len(times))
print 'Mean: ' + str(mean)
ss = sum((x - mean)**2 for x in times)
print 'Stddev (sample): ' + str((ss/float(len(times) - 1))**0.5)
if __name__ == '__main__':
main()
|
Add small script for calculating shock retrieval times#!/usr/bin/env python
import requests
import time
import sys
from pymongo.mongo_client import MongoClient
MONGO_DB = 'ws_test'
SHOCK_HOST = 'http://localhost:7044'
def main():
token = sys.argv[1]
mcli = MongoClient()
db = mcli[MONGO_DB]
ws = -1
md5s = []
for ver in db.workspaceObjVersions.find().sort([('ws', 1), ('id', 1)]):
if ws == -1:
ws = ver['ws']
else:
if ws != ver['ws']:
raise ValueError('more than one workspace found')
md5s.append(ver['chksum'])
nodes = []
for md5 in md5s:
rec = db.shock_nodeMap.find_one({'chksum': md5})
nodes.append(rec['node'])
print nodes
headers = {'Authorization': 'OAuth ' + token}
times = []
count = 1
for node in nodes:
now = time.clock()
ret = requests.get(SHOCK_HOST + '/node/' + node + '/?download', headers=headers).text
times.append(time.clock() - now)
print count
count += 1
print 'N: ' + str(len(times))
mean = sum(times) / float(len(times))
print 'Mean: ' + str(mean)
ss = sum((x - mean)**2 for x in times)
print 'Stddev (sample): ' + str((ss/float(len(times) - 1))**0.5)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add small script for calculating shock retrieval times<commit_after>#!/usr/bin/env python
import requests
import time
import sys
from pymongo.mongo_client import MongoClient
MONGO_DB = 'ws_test'
SHOCK_HOST = 'http://localhost:7044'
def main():
token = sys.argv[1]
mcli = MongoClient()
db = mcli[MONGO_DB]
ws = -1
md5s = []
for ver in db.workspaceObjVersions.find().sort([('ws', 1), ('id', 1)]):
if ws == -1:
ws = ver['ws']
else:
if ws != ver['ws']:
raise ValueError('more than one workspace found')
md5s.append(ver['chksum'])
nodes = []
for md5 in md5s:
rec = db.shock_nodeMap.find_one({'chksum': md5})
nodes.append(rec['node'])
print nodes
headers = {'Authorization': 'OAuth ' + token}
times = []
count = 1
for node in nodes:
now = time.clock()
ret = requests.get(SHOCK_HOST + '/node/' + node + '/?download', headers=headers).text
times.append(time.clock() - now)
print count
count += 1
print 'N: ' + str(len(times))
mean = sum(times) / float(len(times))
print 'Mean: ' + str(mean)
ss = sum((x - mean)**2 for x in times)
print 'Stddev (sample): ' + str((ss/float(len(times) - 1))**0.5)
if __name__ == '__main__':
main()
|
|
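One caveat about the timing script above: time.clock() measures CPU time on many platforms (and was removed in Python 3.8), which undercounts time spent waiting on the network. A sketch of the same loop with a wall-clock timer, using hypothetical node ids and a placeholder token:

import time
import requests

times = []
for node in ['node-id-1', 'node-id-2']:          # hypothetical node ids
    start = time.perf_counter()                  # wall-clock timer
    requests.get('http://localhost:7044/node/' + node + '/?download',
                 headers={'Authorization': 'OAuth <token>'})
    times.append(time.perf_counter() - start)
print('Mean:', sum(times) / len(times))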
dd9b7efa6e1255a26ae05ccab8e7ed19ac83961b
|
colour/examples/volume/examples_rgb.py
|
colour/examples/volume/examples_rgb.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
|
Add "RGB" colorspace volume computation example.
|
Add "RGB" colorspace volume computation example.
|
Python
|
bsd-3-clause
|
colour-science/colour
|
Add "RGB" colorspace volume computation example.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
|
<commit_before><commit_msg>Add "RGB" colorspace volume computation example.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
|
Add "RGB" colorspace volume computation example.#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
|
<commit_before><commit_msg>Add "RGB" colorspace volume computation example.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
|
|
29bbbc8ded596b19e5c090fc4264333126b9a995
|
datasets/management/commands/clear_store.py
|
datasets/management/commands/clear_store.py
|
from django.core.management.base import BaseCommand
from utils.redis_store import store
class Command(BaseCommand):
help = 'Remove all keys stored in Redis Store. Use it as python manage.py clear_store'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
count = store.delete_keys()
print('Deleted {0} keys'.format(count))
|
Add command to clear redis store contents
|
Add command to clear redis store contents
|
Python
|
agpl-3.0
|
MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets,MTG/freesound-datasets
|
Add command to clear redis store contents
|
from django.core.management.base import BaseCommand
from utils.redis_store import store
class Command(BaseCommand):
help = 'Remove all keys stored in Redis Store. Use it as python manage.py clear_store'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
count = store.delete_keys()
print('Deleted {0} keys'.format(count))
|
<commit_before><commit_msg>Add command to clear redis store contents<commit_after>
|
from django.core.management.base import BaseCommand
from utils.redis_store import store
class Command(BaseCommand):
help = 'Remove all keys stored in Redis Store. Use it as python manage.py clear_store'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
count = store.delete_keys()
print('Deleted {0} keys'.format(count))
|
Add command to clear redis store contentsfrom django.core.management.base import BaseCommand
from utils.redis_store import store
class Command(BaseCommand):
help = 'Remove all keys stored in Redis Store. Use it as python manage.py clear_store'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
count = store.delete_keys()
print('Deleted {0} keys'.format(count))
|
<commit_before><commit_msg>Add command to clear redis store contents<commit_after>from django.core.management.base import BaseCommand
from utils.redis_store import store
class Command(BaseCommand):
help = 'Remove all keys stored in Redis Store. Use it as python manage.py clear_store'
def add_arguments(self, parser):
pass
def handle(self, *args, **options):
count = store.delete_keys()
print('Deleted {0} keys'.format(count))
|
|
295772657e61edf0dc10b3c5206248c6bfb6273f
|
py_naca0020_3d_openfoam/plotting.py
|
py_naca0020_3d_openfoam/plotting.py
|
"""
Plotting functions.
"""
import matplotlib.pyplot as plt
from .processing import *
def plot_spanwise_pressure(ax=None):
"""Plot spanwise pressure, normalized and inverted."""
df = load_sampled_set("spanwise", "p")
df["p_norm"] = -df.p
df.p_norm -= df.p_norm.min()
df.p_norm /= df.p_norm.max()
if ax is None:
fig, ax = plt.subplots()
ax.plot(df.z, df.p_norm)
ax.set_xlabel("$z/H$")
ax.set_ylabel(r"$-\hat{p}$")
|
Add function to plot spanwise pressure
|
Add function to plot spanwise pressure
|
Python
|
mit
|
petebachant/actuatorLine-3D-turbinesFoam,petebachant/actuatorLine-3D-turbinesFoam,petebachant/actuatorLine-3D-turbinesFoam,petebachant/NACA0020-3D-OpenFOAM,petebachant/NACA0020-3D-OpenFOAM,petebachant/NACA0020-3D-OpenFOAM
|
Add function to plot spanwise pressure
|
"""
Plotting functions.
"""
import matplotlib.pyplot as plt
from .processing import *
def plot_spanwise_pressure(ax=None):
"""Plot spanwise pressure, normalized and inverted."""
df = load_sampled_set("spanwise", "p")
df["p_norm"] = -df.p
df.p_norm -= df.p_norm.min()
df.p_norm /= df.p_norm.max()
if ax is None:
fig, ax = plt.subplots()
ax.plot(df.z, df.p_norm)
ax.set_xlabel("$z/H$")
ax.set_ylabel(r"$-\hat{p}$")
|
<commit_before><commit_msg>Add function to plot spanwise pressure<commit_after>
|
"""
Plotting functions.
"""
import matplotlib.pyplot as plt
from .processing import *
def plot_spanwise_pressure(ax=None):
"""Plot spanwise pressure, normalized and inverted."""
df = load_sampled_set("spanwise", "p")
df["p_norm"] = -df.p
df.p_norm -= df.p_norm.min()
df.p_norm /= df.p_norm.max()
if ax is None:
fig, ax = plt.subplots()
ax.plot(df.z, df.p_norm)
ax.set_xlabel("$z/H$")
ax.set_ylabel(r"$-\hat{p}$")
|
Add function to plot spanwise pressure"""
Plotting functions.
"""
import matplotlib.pyplot as plt
from .processing import *
def plot_spanwise_pressure(ax=None):
"""Plot spanwise pressure, normalized and inverted."""
df = load_sampled_set("spanwise", "p")
df["p_norm"] = -df.p
df.p_norm -= df.p_norm.min()
df.p_norm /= df.p_norm.max()
if ax is None:
fig, ax = plt.subplots()
ax.plot(df.z, df.p_norm)
ax.set_xlabel("$z/H$")
ax.set_ylabel(r"$-\hat{p}$")
|
<commit_before><commit_msg>Add function to plot spanwise pressure<commit_after>"""
Plotting functions.
"""
import matplotlib.pyplot as plt
from .processing import *
def plot_spanwise_pressure(ax=None):
"""Plot spanwise pressure, normalized and inverted."""
df = load_sampled_set("spanwise", "p")
df["p_norm"] = -df.p
df.p_norm -= df.p_norm.min()
df.p_norm /= df.p_norm.max()
if ax is None:
fig, ax = plt.subplots()
ax.plot(df.z, df.p_norm)
ax.set_xlabel("$z/H$")
ax.set_ylabel(r"$-\hat{p}$")
|
|
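A brief usage sketch for the plotting helper above; the module path is inferred from the file path in the record, and it assumes the sampled "spanwise" set exists on disk so load_sampled_set can read it:

import matplotlib.pyplot as plt
from py_naca0020_3d_openfoam.plotting import plot_spanwise_pressure

fig, ax = plt.subplots()
plot_spanwise_pressure(ax=ax)   # loads the sampled set and draws onto ax
fig.savefig('spanwise_pressure.png')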
ef2507008bfc9e1adc3d4926a587e562a3eb8129
|
django/website/logframe/tests/test_admin.py
|
django/website/logframe/tests/test_admin.py
|
from mock import Mock
from ..admin import SubIndicatorAdmin
from ..models import SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
|
Add test for SubIndicatorAdmin result method
|
Add test for SubIndicatorAdmin result method
|
Python
|
agpl-3.0
|
aptivate/kashana,aptivate/kashana,aptivate/alfie,daniell/kashana,daniell/kashana,aptivate/alfie,aptivate/kashana,daniell/kashana,daniell/kashana,aptivate/alfie,aptivate/kashana,aptivate/alfie
|
Add test for SubIndicatorAdmin result method
|
from mock import Mock
from ..admin import SubIndicatorAdmin
from ..models import SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
|
<commit_before><commit_msg>Add test for SubIndicatorAdmin result method<commit_after>
|
from mock import Mock
from ..admin import SubIndicatorAdmin
from ..models import SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
|
Add test for SubIndicatorAdmin result methodfrom mock import Mock
from ..admin import SubIndicatorAdmin
from ..models import SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
|
<commit_before><commit_msg>Add test for SubIndicatorAdmin result method<commit_after>from mock import Mock
from ..admin import SubIndicatorAdmin
from ..models import SubIndicator
def test_sub_indicator_admin_result_returns_indicator_result():
sub_indicator = Mock(indicator=Mock(result='result'))
admin = SubIndicatorAdmin(SubIndicator, None)
assert sub_indicator.indicator.result == admin.result(sub_indicator)
|
|
8b847215c2ae071a4a2e402167e20fdd641b222d
|
xenserver/destroy_cached_images.py
|
xenserver/destroy_cached_images.py
|
"""
destroy_cached_images.py
This script is used to clean up Glance images that are cached in the SR. By
default, this script will only clean up unused cached images.
Options:
--dry_run - Don't actually destroy the VDIs
--all_cached - Destroy all cached images instead of just unused cached
images.
"""
import eventlet
eventlet.monkey_patch()
import os
import sys
# If ../nova/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'nova', '__init__.py')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from nova import flags
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.xenapi import driver as xenapi_driver
from nova.virt.xenapi import vm_utils
FLAGS = flags.FLAGS
destroy_opts = [
cfg.BoolOpt('all_cached',
default=False,
help='Destroy all cached images instead of just unused cached'
' images.'),
cfg.BoolOpt('dry_run',
default=False,
help='Don\'t actually delete the VDIs.')
]
FLAGS.register_cli_opts(destroy_opts)
def main():
flags.parse_args(sys.argv)
utils.monkey_patch()
xenapi = xenapi_driver.XenAPIDriver()
session = xenapi._session
sr_ref = vm_utils.safe_find_sr(session)
destroyed = vm_utils.destroy_cached_images(
session, sr_ref, all_cached=FLAGS.all_cached,
dry_run=FLAGS.dry_run)
if '--verbose' in sys.argv:
print '\n'.join(destroyed)
print "Destroyed %d cached VDIs" % len(destroyed)
if __name__ == "__main__":
main()
|
Add script to destroy cached images.
|
XenAPI: Add script to destroy cached images.
Operations will want the ability to clear out cached images when
disk-space becomes an issue.
This script allows ops to clear out all cached images or just cached
images that aren't in current use.
Change-Id: If87bd10ef3f893c416d2f0615358ba65aef17a2d
|
Python
|
apache-2.0
|
emonty/oslo-hacking,zancas/hacking,zancas/hacking,emonty/oslo-hacking,hyakuhei/cleantox,hyakuhei/cleantox,openstack-dev/hacking,openstack-dev/hacking
|
XenAPI: Add script to destroy cached images.
Operations will want the ability to clear out cached images when
disk-space becomes an issue.
This script allows ops to clear out all cached images or just cached
images that aren't in current use.
Change-Id: If87bd10ef3f893c416d2f0615358ba65aef17a2d
|
"""
destroy_cached_images.py
This script is used to clean up Glance images that are cached in the SR. By
default, this script will only clean up unused cached images.
Options:
--dry_run - Don't actually destroy the VDIs
--all_cached - Destroy all cached images instead of just unused cached
images.
"""
import eventlet
eventlet.monkey_patch()
import os
import sys
# If ../nova/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'nova', '__init__.py')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from nova import flags
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.xenapi import driver as xenapi_driver
from nova.virt.xenapi import vm_utils
FLAGS = flags.FLAGS
destroy_opts = [
cfg.BoolOpt('all_cached',
default=False,
help='Destroy all cached images instead of just unused cached'
' images.'),
cfg.BoolOpt('dry_run',
default=False,
help='Don\'t actually delete the VDIs.')
]
FLAGS.register_cli_opts(destroy_opts)
def main():
flags.parse_args(sys.argv)
utils.monkey_patch()
xenapi = xenapi_driver.XenAPIDriver()
session = xenapi._session
sr_ref = vm_utils.safe_find_sr(session)
destroyed = vm_utils.destroy_cached_images(
session, sr_ref, all_cached=FLAGS.all_cached,
dry_run=FLAGS.dry_run)
if '--verbose' in sys.argv:
print '\n'.join(destroyed)
print "Destroyed %d cached VDIs" % len(destroyed)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>XenAPI: Add script to destroy cached images.
Operations will want the ability to clear out cached images when
disk-space becomes an issue.
This script allows ops to clear out all cached images or just cached
images that aren't in current use.
Change-Id: If87bd10ef3f893c416d2f0615358ba65aef17a2d<commit_after>
|
"""
destroy_cached_images.py
This script is used to clean up Glance images that are cached in the SR. By
default, this script will only clean up unused cached images.
Options:
--dry_run - Don't actually destroy the VDIs
--all_cached - Destroy all cached images instead of just unused cached
images.
"""
import eventlet
eventlet.monkey_patch()
import os
import sys
# If ../nova/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'nova', '__init__.py')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from nova import flags
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.xenapi import driver as xenapi_driver
from nova.virt.xenapi import vm_utils
FLAGS = flags.FLAGS
destroy_opts = [
cfg.BoolOpt('all_cached',
default=False,
help='Destroy all cached images instead of just unused cached'
' images.'),
cfg.BoolOpt('dry_run',
default=False,
help='Don\'t actually delete the VDIs.')
]
FLAGS.register_cli_opts(destroy_opts)
def main():
flags.parse_args(sys.argv)
utils.monkey_patch()
xenapi = xenapi_driver.XenAPIDriver()
session = xenapi._session
sr_ref = vm_utils.safe_find_sr(session)
destroyed = vm_utils.destroy_cached_images(
session, sr_ref, all_cached=FLAGS.all_cached,
dry_run=FLAGS.dry_run)
if '--verbose' in sys.argv:
print '\n'.join(destroyed)
print "Destroyed %d cached VDIs" % len(destroyed)
if __name__ == "__main__":
main()
|
XenAPI: Add script to destroy cached images.
Operations will want the ability to clear out cached images when
disk-space becomes an issue.
This script allows ops to clear out all cached images or just cached
images that aren't in current use.
Change-Id: If87bd10ef3f893c416d2f0615358ba65aef17a2d"""
destroy_cached_images.py
This script is used to clean up Glance images that are cached in the SR. By
default, this script will only clean up unused cached images.
Options:
--dry_run - Don't actually destroy the VDIs
--all_cached - Destroy all cached images instead of just unused cached
images.
"""
import eventlet
eventlet.monkey_patch()
import os
import sys
# If ../nova/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'nova', '__init__.py')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from nova import flags
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.xenapi import driver as xenapi_driver
from nova.virt.xenapi import vm_utils
FLAGS = flags.FLAGS
destroy_opts = [
cfg.BoolOpt('all_cached',
default=False,
help='Destroy all cached images instead of just unused cached'
' images.'),
cfg.BoolOpt('dry_run',
default=False,
help='Don\'t actually delete the VDIs.')
]
FLAGS.register_cli_opts(destroy_opts)
def main():
flags.parse_args(sys.argv)
utils.monkey_patch()
xenapi = xenapi_driver.XenAPIDriver()
session = xenapi._session
sr_ref = vm_utils.safe_find_sr(session)
destroyed = vm_utils.destroy_cached_images(
session, sr_ref, all_cached=FLAGS.all_cached,
dry_run=FLAGS.dry_run)
if '--verbose' in sys.argv:
print '\n'.join(destroyed)
print "Destroyed %d cached VDIs" % len(destroyed)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>XenAPI: Add script to destroy cached images.
Operations will want the ability to clear out cached images when
disk-space becomes an issue.
This script allows ops to clear out all cached images or just cached
images that aren't in current use.
Change-Id: If87bd10ef3f893c416d2f0615358ba65aef17a2d<commit_after>"""
destroy_cached_images.py
This script is used to clean up Glance images that are cached in the SR. By
default, this script will only clean up unused cached images.
Options:
--dry_run - Don't actually destroy the VDIs
--all_cached - Destroy all cached images instead of just unused cached
images.
"""
import eventlet
eventlet.monkey_patch()
import os
import sys
# If ../nova/__init__.py exists, add ../ to Python search path, so that
# it will override what happens to be installed in /usr/(local/)lib/python...
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
os.pardir,
os.pardir,
os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'nova', '__init__.py')):
sys.path.insert(0, POSSIBLE_TOPDIR)
from nova import flags
from nova.openstack.common import cfg
from nova.openstack.common import log as logging
from nova import utils
from nova.virt.xenapi import driver as xenapi_driver
from nova.virt.xenapi import vm_utils
FLAGS = flags.FLAGS
destroy_opts = [
cfg.BoolOpt('all_cached',
default=False,
help='Destroy all cached images instead of just unused cached'
' images.'),
cfg.BoolOpt('dry_run',
default=False,
help='Don\'t actually delete the VDIs.')
]
FLAGS.register_cli_opts(destroy_opts)
def main():
flags.parse_args(sys.argv)
utils.monkey_patch()
xenapi = xenapi_driver.XenAPIDriver()
session = xenapi._session
sr_ref = vm_utils.safe_find_sr(session)
destroyed = vm_utils.destroy_cached_images(
session, sr_ref, all_cached=FLAGS.all_cached,
dry_run=FLAGS.dry_run)
if '--verbose' in sys.argv:
print '\n'.join(destroyed)
print "Destroyed %d cached VDIs" % len(destroyed)
if __name__ == "__main__":
main()
|
|
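A note on the record above: the script delegates the real work to vm_utils.destroy_cached_images, which is expected to return the list of VDI UUIDs it destroyed (or, under --dry_run, would have destroyed). The following is a minimal, self-contained sketch of that contract — FakeSession, get_cached_vdis and destroy_vdi are hypothetical stand-ins, not nova's implementation:

class FakeSession(object):
    # Stand-in for a XenAPI session; the real one talks to the hypervisor.
    def __init__(self, vdis):
        self.vdis = vdis

    def get_cached_vdis(self, sr_ref):
        return self.vdis

    def destroy_vdi(self, ref):
        pass

def destroy_cached_images(session, sr_ref, all_cached=False, dry_run=False):
    # Pick every cached VDI, or only the unused ones; destroy unless dry run.
    cached = session.get_cached_vdis(sr_ref)
    targets = cached if all_cached else [v for v in cached if not v['in_use']]
    destroyed = []
    for vdi in targets:
        if not dry_run:
            session.destroy_vdi(vdi['ref'])
        destroyed.append(vdi['uuid'])
    return destroyed

session = FakeSession([{'uuid': 'a', 'ref': 'r1', 'in_use': True},
                       {'uuid': 'b', 'ref': 'r2', 'in_use': False}])
assert destroy_cached_images(session, 'sr-ref', dry_run=True) == ['b']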
e2caa28af50b5c053b8da248c3b05b3a3bb33be0
|
shapely/tests/test_collection.py
|
shapely/tests/test_collection.py
|
import unittest
from shapely.geometry.collection import GeometryCollection
class CollectionTestCase(unittest.TestCase):
def test_array_interface(self):
m = GeometryCollection()
self.failUnlessEqual(len(m), 0)
def test_suite():
return unittest.TestLoader().loadTestsFromTestCase(CollectionTestCase)
|
Add test of empty geometry collection creation.
|
Add test of empty geometry collection creation.
git-svn-id: 1a8067f95329a7fca9bad502d13a880b95ac544b@1599 b426a367-1105-0410-b9ff-cdf4ab011145
|
Python
|
bsd-3-clause
|
mindw/shapely,mouadino/Shapely,mouadino/Shapely,mindw/shapely,abali96/Shapely,jdmcbr/Shapely,jdmcbr/Shapely,abali96/Shapely
|
Add test of empty geometry collection creation.
git-svn-id: 1a8067f95329a7fca9bad502d13a880b95ac544b@1599 b426a367-1105-0410-b9ff-cdf4ab011145
|
import unittest
from shapely.geometry.collection import GeometryCollection
class CollectionTestCase(unittest.TestCase):
def test_array_interface(self):
m = GeometryCollection()
self.failUnlessEqual(len(m), 0)
def test_suite():
return unittest.TestLoader().loadTestsFromTestCase(CollectionTestCase)
|
<commit_before><commit_msg>Add test of empty geometry collection creation.
git-svn-id: 1a8067f95329a7fca9bad502d13a880b95ac544b@1599 b426a367-1105-0410-b9ff-cdf4ab011145<commit_after>
|
import unittest
from shapely.geometry.collection import GeometryCollection
class CollectionTestCase(unittest.TestCase):
def test_array_interface(self):
m = GeometryCollection()
self.failUnlessEqual(len(m), 0)
def test_suite():
return unittest.TestLoader().loadTestsFromTestCase(CollectionTestCase)
|
Add test of empty geometry collection creation.
git-svn-id: 1a8067f95329a7fca9bad502d13a880b95ac544b@1599 b426a367-1105-0410-b9ff-cdf4ab011145import unittest
from shapely.geometry.collection import GeometryCollection
class CollectionTestCase(unittest.TestCase):
def test_array_interface(self):
m = GeometryCollection()
self.failUnlessEqual(len(m), 0)
def test_suite():
return unittest.TestLoader().loadTestsFromTestCase(CollectionTestCase)
|
<commit_before><commit_msg>Add test of empty geometry collection creation.
git-svn-id: 1a8067f95329a7fca9bad502d13a880b95ac544b@1599 b426a367-1105-0410-b9ff-cdf4ab011145<commit_after>import unittest
from shapely.geometry.collection import GeometryCollection
class CollectionTestCase(unittest.TestCase):
def test_array_interface(self):
m = GeometryCollection()
self.failUnlessEqual(len(m), 0)
def test_suite():
return unittest.TestLoader().loadTestsFromTestCase(CollectionTestCase)
|
|
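The record above uses the pre-pytest unittest convention where each test module exposes a test_suite() hook. A short, runnable illustration of driving such a hook directly (a plain list stands in for GeometryCollection, since shapely may not be installed):

import unittest

class EmptySequenceTestCase(unittest.TestCase):
    def test_len_is_zero(self):
        self.assertEqual(len([]), 0)  # stand-in for GeometryCollection()

def test_suite():
    return unittest.TestLoader().loadTestsFromTestCase(EmptySequenceTestCase)

if __name__ == '__main__':
    unittest.TextTestRunner(verbosity=2).run(test_suite())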
57c55df6e848b7e43cc306ae9ae05363de766df9
|
pymt/framework/tests/test_bmi_time_units.py
|
pymt/framework/tests/test_bmi_time_units.py
|
from nose.tools import assert_equal
from pymt.framework.bmi_bridge import _BmiCap
class SimpleTimeBmi():
def get_time_units(self):
return 0, 'h'
def get_start_time(self):
return 0, 1.
def get_current_time(self):
return 0, 10.5
def get_end_time(self):
return 0, 72
def get_time_step(self):
return 0, 0.25
class Bmi(_BmiCap):
_cls = SimpleTimeBmi
def test_time_wrap():
bmi = Bmi()
assert_equal(bmi.get_time_units(), 'h')
assert_equal(bmi.get_start_time(), 1.)
assert_equal(bmi.get_current_time(), 10.5)
assert_equal(bmi.get_end_time(), 72.)
assert_equal(bmi.get_time_step(), .25)
assert_equal(bmi.time_units, 'h')
def test_time_conversion():
bmi = Bmi()
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_start_time(units='min'), 60.)
assert_equal(bmi.get_current_time(units='min'), 630.)
assert_equal(bmi.get_end_time(units='d'), 3)
def test_change_time_units():
bmi = Bmi()
assert_equal(bmi.time_units, 'h')
bmi.time_units = 'min'
assert_equal(bmi.get_start_time(), 60.)
assert_equal(bmi.get_current_time(), 630.)
assert_equal(bmi.get_end_time(), 72 * 60)
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_current_time(units='h'), 10.5)
assert_equal(bmi.get_end_time(units='h'), 72)
|
Add tests for time units methods.
|
Add tests for time units methods.
|
Python
|
mit
|
csdms/pymt,csdms/coupling,csdms/coupling
|
Add tests for time units methods.
|
from nose.tools import assert_equal
from pymt.framework.bmi_bridge import _BmiCap
class SimpleTimeBmi():
def get_time_units(self):
return 0, 'h'
def get_start_time(self):
return 0, 1.
def get_current_time(self):
return 0, 10.5
def get_end_time(self):
return 0, 72
def get_time_step(self):
return 0, 0.25
class Bmi(_BmiCap):
_cls = SimpleTimeBmi
def test_time_wrap():
bmi = Bmi()
assert_equal(bmi.get_time_units(), 'h')
assert_equal(bmi.get_start_time(), 1.)
assert_equal(bmi.get_current_time(), 10.5)
assert_equal(bmi.get_end_time(), 72.)
assert_equal(bmi.get_time_step(), .25)
assert_equal(bmi.time_units, 'h')
def test_time_conversion():
bmi = Bmi()
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_start_time(units='min'), 60.)
assert_equal(bmi.get_current_time(units='min'), 630.)
assert_equal(bmi.get_end_time(units='d'), 3)
def test_change_time_units():
bmi = Bmi()
assert_equal(bmi.time_units, 'h')
bmi.time_units = 'min'
assert_equal(bmi.get_start_time(), 60.)
assert_equal(bmi.get_current_time(), 630.)
assert_equal(bmi.get_end_time(), 72 * 60)
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_current_time(units='h'), 10.5)
assert_equal(bmi.get_end_time(units='h'), 72)
|
<commit_before><commit_msg>Add tests for time units methods.<commit_after>
|
from nose.tools import assert_equal
from pymt.framework.bmi_bridge import _BmiCap
class SimpleTimeBmi():
def get_time_units(self):
return 0, 'h'
def get_start_time(self):
return 0, 1.
def get_current_time(self):
return 0, 10.5
def get_end_time(self):
return 0, 72
def get_time_step(self):
return 0, 0.25
class Bmi(_BmiCap):
_cls = SimpleTimeBmi
def test_time_wrap():
bmi = Bmi()
assert_equal(bmi.get_time_units(), 'h')
assert_equal(bmi.get_start_time(), 1.)
assert_equal(bmi.get_current_time(), 10.5)
assert_equal(bmi.get_end_time(), 72.)
assert_equal(bmi.get_time_step(), .25)
assert_equal(bmi.time_units, 'h')
def test_time_conversion():
bmi = Bmi()
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_start_time(units='min'), 60.)
assert_equal(bmi.get_current_time(units='min'), 630.)
assert_equal(bmi.get_end_time(units='d'), 3)
def test_change_time_units():
bmi = Bmi()
assert_equal(bmi.time_units, 'h')
bmi.time_units = 'min'
assert_equal(bmi.get_start_time(), 60.)
assert_equal(bmi.get_current_time(), 630.)
assert_equal(bmi.get_end_time(), 72 * 60)
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_current_time(units='h'), 10.5)
assert_equal(bmi.get_end_time(units='h'), 72)
|
Add tests for time units methods.from nose.tools import assert_equal
from pymt.framework.bmi_bridge import _BmiCap
class SimpleTimeBmi():
def get_time_units(self):
return 0, 'h'
def get_start_time(self):
return 0, 1.
def get_current_time(self):
return 0, 10.5
def get_end_time(self):
return 0, 72
def get_time_step(self):
return 0, 0.25
class Bmi(_BmiCap):
_cls = SimpleTimeBmi
def test_time_wrap():
bmi = Bmi()
assert_equal(bmi.get_time_units(), 'h')
assert_equal(bmi.get_start_time(), 1.)
assert_equal(bmi.get_current_time(), 10.5)
assert_equal(bmi.get_end_time(), 72.)
assert_equal(bmi.get_time_step(), .25)
assert_equal(bmi.time_units, 'h')
def test_time_conversion():
bmi = Bmi()
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_start_time(units='min'), 60.)
assert_equal(bmi.get_current_time(units='min'), 630.)
assert_equal(bmi.get_end_time(units='d'), 3)
def test_change_time_units():
bmi = Bmi()
assert_equal(bmi.time_units, 'h')
bmi.time_units = 'min'
assert_equal(bmi.get_start_time(), 60.)
assert_equal(bmi.get_current_time(), 630.)
assert_equal(bmi.get_end_time(), 72 * 60)
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_current_time(units='h'), 10.5)
assert_equal(bmi.get_end_time(units='h'), 72)
|
<commit_before><commit_msg>Add tests for time units methods.<commit_after>from nose.tools import assert_equal
from pymt.framework.bmi_bridge import _BmiCap
class SimpleTimeBmi():
def get_time_units(self):
return 0, 'h'
def get_start_time(self):
return 0, 1.
def get_current_time(self):
return 0, 10.5
def get_end_time(self):
return 0, 72
def get_time_step(self):
return 0, 0.25
class Bmi(_BmiCap):
_cls = SimpleTimeBmi
def test_time_wrap():
bmi = Bmi()
assert_equal(bmi.get_time_units(), 'h')
assert_equal(bmi.get_start_time(), 1.)
assert_equal(bmi.get_current_time(), 10.5)
assert_equal(bmi.get_end_time(), 72.)
assert_equal(bmi.get_time_step(), .25)
assert_equal(bmi.time_units, 'h')
def test_time_conversion():
bmi = Bmi()
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_start_time(units='min'), 60.)
assert_equal(bmi.get_current_time(units='min'), 630.)
assert_equal(bmi.get_end_time(units='d'), 3)
def test_change_time_units():
bmi = Bmi()
assert_equal(bmi.time_units, 'h')
bmi.time_units = 'min'
assert_equal(bmi.get_start_time(), 60.)
assert_equal(bmi.get_current_time(), 630.)
assert_equal(bmi.get_end_time(), 72 * 60)
assert_equal(bmi.get_start_time(units='h'), 1.)
assert_equal(bmi.get_current_time(units='h'), 10.5)
assert_equal(bmi.get_end_time(units='h'), 72)
|
|
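The tests in the record above assume two behaviours of the _BmiCap bridge: it strips the (status, value) tuples returned by the raw BMI methods, and it converts between time units. A simplified sketch of both ideas using a fixed factor table (the real bridge uses a units library; this is an assumption-laden stand-in):

_SECONDS = {'s': 1.0, 'min': 60.0, 'h': 3600.0, 'd': 86400.0}

def convert_time(value, from_units, to_units):
    # Convert a time quantity via seconds.
    return value * _SECONDS[from_units] / _SECONDS[to_units]

def unwrap(bmi_result):
    # Drop the leading status code from a (status, value) BMI tuple.
    status, value = bmi_result
    if status != 0:
        raise RuntimeError('BMI call failed with status %d' % status)
    return value

assert convert_time(1.0, 'h', 'min') == 60.0
assert convert_time(72.0, 'h', 'd') == 3.0
assert unwrap((0, 10.5)) == 10.5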
8a48b9517a844a426319f584ea27ba341d79c8b0
|
tests/pytests/unit/runners/test_spacewalk.py
|
tests/pytests/unit/runners/test_spacewalk.py
|
"""
Unit tests for Spacewalk runner
"""
import salt.runners.spacewalk as spacewalk
from tests.support.mock import Mock, call, patch
def test_api_command_must_have_namespace():
_get_session_mock = Mock(return_value=(None, None))
with patch.object(spacewalk, "_get_session", _get_session_mock):
result = spacewalk.api("mocked.server", "badMethod")
assert result == {
"badMethod ()": "Error: command must use the following format: 'namespace.method'"
}
def test_api_command_accepts_single_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "system.listSystems")
getattr_mock.assert_has_calls(
[
call(client_mock, "system"),
call("mocked_getattr_return", "listSystems"),
]
)
def test_api_command_accepts_nested_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "channel.software.listChildren")
getattr_mock.assert_has_calls(
[
call(client_mock, "channel.software"),
call("mocked_getattr_return", "listChildren"),
]
)
|
Add spacewalk runner command parsing tests
|
Add spacewalk runner command parsing tests
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add spacewalk runner command parsing tests
|
"""
Unit tests for Spacewalk runner
"""
import salt.runners.spacewalk as spacewalk
from tests.support.mock import Mock, call, patch
def test_api_command_must_have_namespace():
_get_session_mock = Mock(return_value=(None, None))
with patch.object(spacewalk, "_get_session", _get_session_mock):
result = spacewalk.api("mocked.server", "badMethod")
assert result == {
"badMethod ()": "Error: command must use the following format: 'namespace.method'"
}
def test_api_command_accepts_single_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "system.listSystems")
getattr_mock.assert_has_calls(
[
call(client_mock, "system"),
call("mocked_getattr_return", "listSystems"),
]
)
def test_api_command_accepts_nested_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "channel.software.listChildren")
getattr_mock.assert_has_calls(
[
call(client_mock, "channel.software"),
call("mocked_getattr_return", "listChildren"),
]
)
|
<commit_before><commit_msg>Add spacewalk runner command parsing tests<commit_after>
|
"""
Unit tests for Spacewalk runner
"""
import salt.runners.spacewalk as spacewalk
from tests.support.mock import Mock, call, patch
def test_api_command_must_have_namespace():
_get_session_mock = Mock(return_value=(None, None))
with patch.object(spacewalk, "_get_session", _get_session_mock):
result = spacewalk.api("mocked.server", "badMethod")
assert result == {
"badMethod ()": "Error: command must use the following format: 'namespace.method'"
}
def test_api_command_accepts_single_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "system.listSystems")
getattr_mock.assert_has_calls(
[
call(client_mock, "system"),
call("mocked_getattr_return", "listSystems"),
]
)
def test_api_command_accepts_nested_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "channel.software.listChildren")
getattr_mock.assert_has_calls(
[
call(client_mock, "channel.software"),
call("mocked_getattr_return", "listChildren"),
]
)
|
Add spacewalk runner command parsing tests"""
Unit tests for Spacewalk runner
"""
import salt.runners.spacewalk as spacewalk
from tests.support.mock import Mock, call, patch
def test_api_command_must_have_namespace():
_get_session_mock = Mock(return_value=(None, None))
with patch.object(spacewalk, "_get_session", _get_session_mock):
result = spacewalk.api("mocked.server", "badMethod")
assert result == {
"badMethod ()": "Error: command must use the following format: 'namespace.method'"
}
def test_api_command_accepts_single_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "system.listSystems")
getattr_mock.assert_has_calls(
[
call(client_mock, "system"),
call("mocked_getattr_return", "listSystems"),
]
)
def test_api_command_accepts_nested_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "channel.software.listChildren")
getattr_mock.assert_has_calls(
[
call(client_mock, "channel.software"),
call("mocked_getattr_return", "listChildren"),
]
)
|
<commit_before><commit_msg>Add spacewalk runner command parsing tests<commit_after>"""
Unit tests for Spacewalk runner
"""
import salt.runners.spacewalk as spacewalk
from tests.support.mock import Mock, call, patch
def test_api_command_must_have_namespace():
_get_session_mock = Mock(return_value=(None, None))
with patch.object(spacewalk, "_get_session", _get_session_mock):
result = spacewalk.api("mocked.server", "badMethod")
assert result == {
"badMethod ()": "Error: command must use the following format: 'namespace.method'"
}
def test_api_command_accepts_single_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "system.listSystems")
getattr_mock.assert_has_calls(
[
call(client_mock, "system"),
call("mocked_getattr_return", "listSystems"),
]
)
def test_api_command_accepts_nested_namespace():
client_mock = Mock()
_get_session_mock = Mock(return_value=(client_mock, "key"))
getattr_mock = Mock(return_value="mocked_getattr_return")
with patch.object(spacewalk, "_get_session", _get_session_mock):
with patch.object(spacewalk, "getattr", getattr_mock):
spacewalk.api("mocked.server", "channel.software.listChildren")
getattr_mock.assert_has_calls(
[
call(client_mock, "channel.software"),
call("mocked_getattr_return", "listChildren"),
]
)
|
|
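The three tests in the record above pin down one contract: everything before the last dot in the command string is the namespace (possibly nested), and the final segment is the method. A hedged sketch of parsing logic consistent with those expectations — this is not salt's actual implementation:

def split_command(command):
    # Split 'namespace.method' into parts, or return an error message.
    namespace, _, method = command.rpartition('.')
    if not namespace:
        return None, ("Error: command must use the following format: "
                      "'namespace.method'")
    return (namespace, method), None

assert split_command('system.listSystems')[0] == ('system', 'listSystems')
assert split_command('channel.software.listChildren')[0] == (
    'channel.software', 'listChildren')
assert split_command('badMethod')[0] is None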
58f8c6b28014122fc8dcd8ec41e3dd817de2e017
|
openstack/tests/functional/network/v2/test_router_add_remove_interface.py
|
openstack/tests/functional/network/v2/test_router_add_remove_interface.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.network.v2 import network
from openstack.network.v2 import router
from openstack.network.v2 import subnet
from openstack.tests.functional import base
class TestRouterInterface(base.BaseFunctionalTest):
ROUTER_NAME = uuid.uuid4().hex
NET_NAME = uuid.uuid4().hex
SUB_NAME = uuid.uuid4().hex
CIDR = "10.100.0.0/16"
IPV4 = 4
ROUTER_ID = None
NET_ID = None
SUB_ID = None
ROT = None
@classmethod
def setUpClass(cls):
super(TestRouterInterface, cls).setUpClass()
sot = cls.conn.network.create_router(name=cls.ROUTER_NAME)
assert isinstance(sot, router.Router)
cls.assertIs(cls.ROUTER_NAME, sot.name)
net = cls.conn.network.create_network(name=cls.NET_NAME)
assert isinstance(net, network.Network)
cls.assertIs(cls.NET_NAME, net.name)
sub = cls.conn.network.create_subnet(name=cls.SUB_NAME,
ip_version=cls.IPV4,
network_id=net.id,
cidr=cls.CIDR)
assert isinstance(sub, subnet.Subnet)
cls.assertIs(cls.SUB_NAME, sub.name)
cls.ROUTER_ID = sot.id
cls.ROT = sot
cls.NET_ID = net.id
cls.SUB_ID = sub.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.network.delete_router(cls.ROUTER_ID,
ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_subnet(cls.SUB_ID, ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_network(cls.NET_ID, ignore_missing=False)
cls.assertIs(None, sot)
def test_router_add_interface(self):
iface = self.ROT.add_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def test_router_remove_interface(self):
iface = self.ROT.remove_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def _verification(self, interface):
self.assertEqual(interface['subnet_id'], self.SUB_ID)
self.assertIn('port_id', interface)
|
Add functional tests for add & remove router interface
|
Add functional tests for add & remove router interface
Change-Id: If0616ebd088d3840ca09fbc95494f455b85c1967
|
Python
|
apache-2.0
|
stackforge/python-openstacksdk,briancurtin/python-openstacksdk,dudymas/python-openstacksdk,dudymas/python-openstacksdk,dtroyer/python-openstacksdk,stackforge/python-openstacksdk,dtroyer/python-openstacksdk,mtougeron/python-openstacksdk,mtougeron/python-openstacksdk,openstack/python-openstacksdk,openstack/python-openstacksdk,briancurtin/python-openstacksdk
|
Add functional tests for add & remove router interface
Change-Id: If0616ebd088d3840ca09fbc95494f455b85c1967
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.network.v2 import network
from openstack.network.v2 import router
from openstack.network.v2 import subnet
from openstack.tests.functional import base
class TestRouterInterface(base.BaseFunctionalTest):
ROUTER_NAME = uuid.uuid4().hex
NET_NAME = uuid.uuid4().hex
SUB_NAME = uuid.uuid4().hex
CIDR = "10.100.0.0/16"
IPV4 = 4
ROUTER_ID = None
NET_ID = None
SUB_ID = None
ROT = None
@classmethod
def setUpClass(cls):
super(TestRouterInterface, cls).setUpClass()
sot = cls.conn.network.create_router(name=cls.ROUTER_NAME)
assert isinstance(sot, router.Router)
cls.assertIs(cls.ROUTER_NAME, sot.name)
net = cls.conn.network.create_network(name=cls.NET_NAME)
assert isinstance(net, network.Network)
cls.assertIs(cls.NET_NAME, net.name)
sub = cls.conn.network.create_subnet(name=cls.SUB_NAME,
ip_version=cls.IPV4,
network_id=net.id,
cidr=cls.CIDR)
assert isinstance(sub, subnet.Subnet)
cls.assertIs(cls.SUB_NAME, sub.name)
cls.ROUTER_ID = sot.id
cls.ROT = sot
cls.NET_ID = net.id
cls.SUB_ID = sub.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.network.delete_router(cls.ROUTER_ID,
ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_subnet(cls.SUB_ID, ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_network(cls.NET_ID, ignore_missing=False)
cls.assertIs(None, sot)
def test_router_add_interface(self):
iface = self.ROT.add_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def test_router_remove_interface(self):
iface = self.ROT.remove_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def _verification(self, interface):
self.assertEqual(interface['subnet_id'], self.SUB_ID)
self.assertIn('port_id', interface)
|
<commit_before><commit_msg>Add functional tests for add & remove router interface
Change-Id: If0616ebd088d3840ca09fbc95494f455b85c1967<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.network.v2 import network
from openstack.network.v2 import router
from openstack.network.v2 import subnet
from openstack.tests.functional import base
class TestRouterInterface(base.BaseFunctionalTest):
ROUTER_NAME = uuid.uuid4().hex
NET_NAME = uuid.uuid4().hex
SUB_NAME = uuid.uuid4().hex
CIDR = "10.100.0.0/16"
IPV4 = 4
ROUTER_ID = None
NET_ID = None
SUB_ID = None
ROT = None
@classmethod
def setUpClass(cls):
super(TestRouterInterface, cls).setUpClass()
sot = cls.conn.network.create_router(name=cls.ROUTER_NAME)
assert isinstance(sot, router.Router)
cls.assertIs(cls.ROUTER_NAME, sot.name)
net = cls.conn.network.create_network(name=cls.NET_NAME)
assert isinstance(net, network.Network)
cls.assertIs(cls.NET_NAME, net.name)
sub = cls.conn.network.create_subnet(name=cls.SUB_NAME,
ip_version=cls.IPV4,
network_id=net.id,
cidr=cls.CIDR)
assert isinstance(sub, subnet.Subnet)
cls.assertIs(cls.SUB_NAME, sub.name)
cls.ROUTER_ID = sot.id
cls.ROT = sot
cls.NET_ID = net.id
cls.SUB_ID = sub.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.network.delete_router(cls.ROUTER_ID,
ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_subnet(cls.SUB_ID, ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_network(cls.NET_ID, ignore_missing=False)
cls.assertIs(None, sot)
def test_router_add_interface(self):
iface = self.ROT.add_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def test_router_remove_interface(self):
iface = self.ROT.remove_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def _verification(self, interface):
self.assertEqual(interface['subnet_id'], self.SUB_ID)
self.assertIn('port_id', interface)
|
Add functional tests for add & remove router interface
Change-Id: If0616ebd088d3840ca09fbc95494f455b85c1967# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.network.v2 import network
from openstack.network.v2 import router
from openstack.network.v2 import subnet
from openstack.tests.functional import base
class TestRouterInterface(base.BaseFunctionalTest):
ROUTER_NAME = uuid.uuid4().hex
NET_NAME = uuid.uuid4().hex
SUB_NAME = uuid.uuid4().hex
CIDR = "10.100.0.0/16"
IPV4 = 4
ROUTER_ID = None
NET_ID = None
SUB_ID = None
ROT = None
@classmethod
def setUpClass(cls):
super(TestRouterInterface, cls).setUpClass()
sot = cls.conn.network.create_router(name=cls.ROUTER_NAME)
assert isinstance(sot, router.Router)
cls.assertIs(cls.ROUTER_NAME, sot.name)
net = cls.conn.network.create_network(name=cls.NET_NAME)
assert isinstance(net, network.Network)
cls.assertIs(cls.NET_NAME, net.name)
sub = cls.conn.network.create_subnet(name=cls.SUB_NAME,
ip_version=cls.IPV4,
network_id=net.id,
cidr=cls.CIDR)
assert isinstance(sub, subnet.Subnet)
cls.assertIs(cls.SUB_NAME, sub.name)
cls.ROUTER_ID = sot.id
cls.ROT = sot
cls.NET_ID = net.id
cls.SUB_ID = sub.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.network.delete_router(cls.ROUTER_ID,
ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_subnet(cls.SUB_ID, ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_network(cls.NET_ID, ignore_missing=False)
cls.assertIs(None, sot)
def test_router_add_interface(self):
iface = self.ROT.add_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def test_router_remove_interface(self):
iface = self.ROT.remove_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def _verification(self, interface):
self.assertEqual(interface['subnet_id'], self.SUB_ID)
self.assertIn('port_id', interface)
|
<commit_before><commit_msg>Add functional tests for add & remove router interface
Change-Id: If0616ebd088d3840ca09fbc95494f455b85c1967<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.network.v2 import network
from openstack.network.v2 import router
from openstack.network.v2 import subnet
from openstack.tests.functional import base
class TestRouterInterface(base.BaseFunctionalTest):
ROUTER_NAME = uuid.uuid4().hex
NET_NAME = uuid.uuid4().hex
SUB_NAME = uuid.uuid4().hex
CIDR = "10.100.0.0/16"
IPV4 = 4
ROUTER_ID = None
NET_ID = None
SUB_ID = None
ROT = None
@classmethod
def setUpClass(cls):
super(TestRouterInterface, cls).setUpClass()
sot = cls.conn.network.create_router(name=cls.ROUTER_NAME)
assert isinstance(sot, router.Router)
cls.assertIs(cls.ROUTER_NAME, sot.name)
net = cls.conn.network.create_network(name=cls.NET_NAME)
assert isinstance(net, network.Network)
cls.assertIs(cls.NET_NAME, net.name)
sub = cls.conn.network.create_subnet(name=cls.SUB_NAME,
ip_version=cls.IPV4,
network_id=net.id,
cidr=cls.CIDR)
assert isinstance(sub, subnet.Subnet)
cls.assertIs(cls.SUB_NAME, sub.name)
cls.ROUTER_ID = sot.id
cls.ROT = sot
cls.NET_ID = net.id
cls.SUB_ID = sub.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.network.delete_router(cls.ROUTER_ID,
ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_subnet(cls.SUB_ID, ignore_missing=False)
cls.assertIs(None, sot)
sot = cls.conn.network.delete_network(cls.NET_ID, ignore_missing=False)
cls.assertIs(None, sot)
def test_router_add_interface(self):
iface = self.ROT.add_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def test_router_remove_interface(self):
iface = self.ROT.remove_interface(self.conn.session, self.SUB_ID)
self._verification(iface)
def _verification(self, interface):
self.assertEqual(interface['subnet_id'], self.SUB_ID)
self.assertIn('port_id', interface)
|
|
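In the record above, _verification only checks the shape of the dict that Neutron returns for router interface operations: the subnet that was attached or detached, plus the port that carries it. The same assertions applied to a fabricated payload (all values made up):

def verify_interface(interface, expected_subnet_id):
    # Mirror the record's _verification: subnet id matches, a port id exists.
    assert interface['subnet_id'] == expected_subnet_id
    assert 'port_id' in interface

verify_interface({'subnet_id': 'sub-123', 'port_id': 'port-456'}, 'sub-123')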
c426e8845632d13f27b1cbc71d2c13292cc88711
|
buildbucket.py
|
buildbucket.py
|
#!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tool for interacting with Buildbucket.
Usage:
$ depot-tools-auth login https://cr-buildbucket.appspot.com
$ buildbucket.py \
put \
--bucket master.tryserver.chromium.linux \
--builder my-builder \
Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
"""
import argparse
import json
import urlparse
import os
import sys
from third_party import httplib2
import auth
BUILDBUCKET_URL = 'https://cr-buildbucket.appspot.com'
PUT_BUILD_URL = urlparse.urljoin(
BUILDBUCKET_URL,
'_ah/api/buildbucket/v1/builds',
)
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument(
'-v',
'--verbose',
action='store_true',
)
subparsers = parser.add_subparsers(dest='command')
put_parser = subparsers.add_parser('put')
put_parser.add_argument(
'--bucket',
help=(
'The bucket to schedule the build on. Typically the master name, e.g.'
' master.tryserver.chromium.linux.'
),
required=True,
)
put_parser.add_argument(
'-n',
'--builder-name',
help='The builder to schedule the build on.',
required=True,
)
put_parser.add_argument(
'-p',
'--properties',
help='A file to load a JSON dict of properties from.',
)
args = parser.parse_args()
# TODO(smut): When more commands are implemented, refactor this.
assert args.command == 'put'
properties = {}
if args.properties:
try:
with open(args.properties) as fp:
properties.update(json.load(fp))
except (TypeError, ValueError):
sys.stderr.write('%s contained invalid JSON dict.\n' % args.properties)
raise
authenticator = auth.get_authenticator_for_host(
BUILDBUCKET_URL,
auth.make_auth_config(use_oauth2=True),
)
http = authenticator.authorize(httplib2.Http())
http.force_exception_to_status_code = True
response, content = http.request(
PUT_BUILD_URL,
'PUT',
body=json.dumps({
'bucket': args.bucket,
'parameters_json': json.dumps({
'builder_name': args.builder_name,
'properties': properties,
}),
}),
headers={'Content-Type': 'application/json'},
)
if args.verbose:
print content
return response.status != 200
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add script for triggering Buildbucket builds
|
Add script for triggering Buildbucket builds
BUG=493885
TESTED=See https://paste.googleplex.com/5622248052359168
Review URL: https://codereview.chromium.org/1164363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@295569 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
Python
|
bsd-3-clause
|
svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools
|
Add script for triggering Buildbucket builds
BUG=493885
TESTED=See https://paste.googleplex.com/5622248052359168
Review URL: https://codereview.chromium.org/1164363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@295569 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
#!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tool for interacting with Buildbucket.
Usage:
$ depot-tools-auth login https://cr-buildbucket.appspot.com
$ buildbucket.py \
put \
--bucket master.tryserver.chromium.linux \
--builder my-builder \
Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
"""
import argparse
import json
import urlparse
import os
import sys
from third_party import httplib2
import auth
BUILDBUCKET_URL = 'https://cr-buildbucket.appspot.com'
PUT_BUILD_URL = urlparse.urljoin(
BUILDBUCKET_URL,
'_ah/api/buildbucket/v1/builds',
)
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument(
'-v',
'--verbose',
action='store_true',
)
subparsers = parser.add_subparsers(dest='command')
put_parser = subparsers.add_parser('put')
put_parser.add_argument(
'--bucket',
help=(
'The bucket to schedule the build on. Typically the master name, e.g.'
' master.tryserver.chromium.linux.'
),
required=True,
)
put_parser.add_argument(
'-n',
'--builder-name',
help='The builder to schedule the build on.',
required=True,
)
put_parser.add_argument(
'-p',
'--properties',
help='A file to load a JSON dict of properties from.',
)
args = parser.parse_args()
# TODO(smut): When more commands are implemented, refactor this.
assert args.command == 'put'
properties = {}
if args.properties:
try:
with open(args.properties) as fp:
properties.update(json.load(fp))
except (TypeError, ValueError):
sys.stderr.write('%s contained invalid JSON dict.\n' % args.properties)
raise
authenticator = auth.get_authenticator_for_host(
BUILDBUCKET_URL,
auth.make_auth_config(use_oauth2=True),
)
http = authenticator.authorize(httplib2.Http())
http.force_exception_to_status_code = True
response, content = http.request(
PUT_BUILD_URL,
'PUT',
body=json.dumps({
'bucket': args.bucket,
'parameters_json': json.dumps({
'builder_name': args.builder_name,
'properties': properties,
}),
}),
headers={'Content-Type': 'application/json'},
)
if args.verbose:
print content
return response.status != 200
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add script for triggering Buildbucket builds
BUG=493885
TESTED=See https://paste.googleplex.com/5622248052359168
Review URL: https://codereview.chromium.org/1164363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@295569 4ff67af0-8c30-449e-8e8b-ad334ec8d88c<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tool for interacting with Buildbucket.
Usage:
$ depot-tools-auth login https://cr-buildbucket.appspot.com
$ buildbucket.py \
put \
--bucket master.tryserver.chromium.linux \
--builder my-builder \
Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
"""
import argparse
import json
import urlparse
import os
import sys
from third_party import httplib2
import auth
BUILDBUCKET_URL = 'https://cr-buildbucket.appspot.com'
PUT_BUILD_URL = urlparse.urljoin(
BUILDBUCKET_URL,
'_ah/api/buildbucket/v1/builds',
)
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument(
'-v',
'--verbose',
action='store_true',
)
subparsers = parser.add_subparsers(dest='command')
put_parser = subparsers.add_parser('put')
put_parser.add_argument(
'--bucket',
help=(
'The bucket to schedule the build on. Typically the master name, e.g.'
' master.tryserver.chromium.linux.'
),
required=True,
)
put_parser.add_argument(
'-n',
'--builder-name',
help='The builder to schedule the build on.',
required=True,
)
put_parser.add_argument(
'-p',
'--properties',
help='A file to load a JSON dict of properties from.',
)
args = parser.parse_args()
# TODO(smut): When more commands are implemented, refactor this.
assert args.command == 'put'
properties = {}
if args.properties:
try:
with open(args.properties) as fp:
properties.update(json.load(fp))
except (TypeError, ValueError):
sys.stderr.write('%s contained invalid JSON dict.\n' % args.properties)
raise
authenticator = auth.get_authenticator_for_host(
BUILDBUCKET_URL,
auth.make_auth_config(use_oauth2=True),
)
http = authenticator.authorize(httplib2.Http())
http.force_exception_to_status_code = True
response, content = http.request(
PUT_BUILD_URL,
'PUT',
body=json.dumps({
'bucket': args.bucket,
'parameters_json': json.dumps({
'builder_name': args.builder_name,
'properties': properties,
}),
}),
headers={'Content-Type': 'application/json'},
)
if args.verbose:
print content
return response.status != 200
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add script for triggering Buildbucket builds
BUG=493885
TESTED=See https://paste.googleplex.com/5622248052359168
Review URL: https://codereview.chromium.org/1164363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@295569 4ff67af0-8c30-449e-8e8b-ad334ec8d88c#!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tool for interacting with Buildbucket.
Usage:
$ depot-tools-auth login https://cr-buildbucket.appspot.com
$ buildbucket.py \
put \
--bucket master.tryserver.chromium.linux \
--builder my-builder \
Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
"""
import argparse
import json
import urlparse
import os
import sys
from third_party import httplib2
import auth
BUILDBUCKET_URL = 'https://cr-buildbucket.appspot.com'
PUT_BUILD_URL = urlparse.urljoin(
BUILDBUCKET_URL,
'_ah/api/buildbucket/v1/builds',
)
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument(
'-v',
'--verbose',
action='store_true',
)
subparsers = parser.add_subparsers(dest='command')
put_parser = subparsers.add_parser('put')
put_parser.add_argument(
'--bucket',
help=(
'The bucket to schedule the build on. Typically the master name, e.g.'
' master.tryserver.chromium.linux.'
),
required=True,
)
put_parser.add_argument(
'-n',
'--builder-name',
help='The builder to schedule the build on.',
required=True,
)
put_parser.add_argument(
'-p',
'--properties',
help='A file to load a JSON dict of properties from.',
)
args = parser.parse_args()
# TODO(smut): When more commands are implemented, refactor this.
assert args.command == 'put'
properties = {}
if args.properties:
try:
with open(args.properties) as fp:
properties.update(json.load(fp))
except (TypeError, ValueError):
sys.stderr.write('%s contained invalid JSON dict.\n' % args.properties)
raise
authenticator = auth.get_authenticator_for_host(
BUILDBUCKET_URL,
auth.make_auth_config(use_oauth2=True),
)
http = authenticator.authorize(httplib2.Http())
http.force_exception_to_status_code = True
response, content = http.request(
PUT_BUILD_URL,
'PUT',
body=json.dumps({
'bucket': args.bucket,
'parameters_json': json.dumps({
'builder_name': args.builder_name,
'properties': properties,
}),
}),
headers={'Content-Type': 'application/json'},
)
if args.verbose:
print content
return response.status != 200
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add script for triggering Buildbucket builds
BUG=493885
TESTED=See https://paste.googleplex.com/5622248052359168
Review URL: https://codereview.chromium.org/1164363003
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@295569 4ff67af0-8c30-449e-8e8b-ad334ec8d88c<commit_after>#!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Tool for interacting with Buildbucket.
Usage:
$ depot-tools-auth login https://cr-buildbucket.appspot.com
$ buildbucket.py \
put \
--bucket master.tryserver.chromium.linux \
--builder my-builder \
Puts a build into buildbucket for my-builder on tryserver.chromium.linux.
"""
import argparse
import json
import urlparse
import os
import sys
from third_party import httplib2
import auth
BUILDBUCKET_URL = 'https://cr-buildbucket.appspot.com'
PUT_BUILD_URL = urlparse.urljoin(
BUILDBUCKET_URL,
'_ah/api/buildbucket/v1/builds',
)
def main(argv):
parser = argparse.ArgumentParser()
parser.add_argument(
'-v',
'--verbose',
action='store_true',
)
subparsers = parser.add_subparsers(dest='command')
put_parser = subparsers.add_parser('put')
put_parser.add_argument(
'--bucket',
help=(
'The bucket to schedule the build on. Typically the master name, e.g.'
' master.tryserver.chromium.linux.'
),
required=True,
)
put_parser.add_argument(
'-n',
'--builder-name',
help='The builder to schedule the build on.',
required=True,
)
put_parser.add_argument(
'-p',
'--properties',
help='A file to load a JSON dict of properties from.',
)
args = parser.parse_args()
# TODO(smut): When more commands are implemented, refactor this.
assert args.command == 'put'
properties = {}
if args.properties:
try:
with open(args.properties) as fp:
properties.update(json.load(fp))
except (TypeError, ValueError):
sys.stderr.write('%s contained invalid JSON dict.\n' % args.properties)
raise
authenticator = auth.get_authenticator_for_host(
BUILDBUCKET_URL,
auth.make_auth_config(use_oauth2=True),
)
http = authenticator.authorize(httplib2.Http())
http.force_exception_to_status_code = True
response, content = http.request(
PUT_BUILD_URL,
'PUT',
body=json.dumps({
'bucket': args.bucket,
'parameters_json': json.dumps({
'builder_name': args.builder_name,
'properties': properties,
}),
}),
headers={'Content-Type': 'application/json'},
)
if args.verbose:
print content
return response.status != 200
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
|
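One subtlety in the record above is that Buildbucket's parameters_json field is a JSON string embedded inside a JSON body, so json.dumps is applied twice. A standalone demonstration of that double encoding and its round trip:

import json

parameters = {'builder_name': 'my-builder', 'properties': {'reason': 'manual'}}
body = json.dumps({
    'bucket': 'master.tryserver.chromium.linux',
    'parameters_json': json.dumps(parameters),  # JSON inside JSON
})
decoded = json.loads(body)
assert json.loads(decoded['parameters_json'])['builder_name'] == 'my-builder'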
9790c81d5b1d0a265c75f191320cb4cb22dfbd27
|
addons/hw_drivers/drivers/KeyboardUSBDriver.py
|
addons/hw_drivers/drivers/KeyboardUSBDriver.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import evdev
import logging
from usb import util
from odoo import _
from odoo.addons.hw_drivers.controllers.driver import event_manager, Driver
_logger = logging.getLogger(__name__)
class KeyboardUSBDriver(Driver):
connection_type = 'usb'
def __init__(self, device):
super(KeyboardUSBDriver, self).__init__(device)
self._device_type = 'device'
self._device_connection = 'direct'
self._device_name = self._get_name()
@classmethod
def supported(cls, device):
for cfg in device:
for itf in cfg:
if itf.bInterfaceClass == 3 and itf.bInterfaceProtocol == 1:
return True
return False
def _get_name(self):
try:
manufacturer = util.get_string(self.dev, 256, self.dev.iManufacturer)
product = util.get_string(self.dev, 256, self.dev.iProduct)
return ("%s - %s") % (manufacturer, product)
except ValueError as e:
_logger.warning(e)
            return _('Unknown keyboard')
def action(self, data):
self.data['value'] = ''
event_manager.device_changed(self)
def run(self):
devices = [evdev.InputDevice(path) for path in evdev.list_devices()]
for device in devices:
if (self.dev.idVendor == device.info.vendor) and (self.dev.idProduct == device.info.product):
path = device.path
device = evdev.InputDevice(path)
try:
for event in device.read_loop():
if event.type == evdev.ecodes.EV_KEY:
data = evdev.categorize(event)
if data.keystate:
self.data['value'] = data.keycode.replace('KEY_','')
event_manager.device_changed(self)
except Exception as err:
_logger.warning(err)
|
Move Keyboard driver to Community
|
[IMP] hw_drivers: Move Keyboard driver to Community
The KeyboardUSBDriver will be transformed to integrate barcode
scanners. The barcode scanners will be used in Community so we move the
driver in Community.
TaskID: 1961025
|
Python
|
agpl-3.0
|
ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo,ygol/odoo
|
[IMP] hw_drivers: Move Keyboard driver to Community
The KeyboardUSBDriver will be transformed to integrate barcode
scanners. The barcode scanners will be used in Community so we move the
driver in Community.
TaskID: 1961025
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import evdev
import logging
from usb import util
from odoo import _
from odoo.addons.hw_drivers.controllers.driver import event_manager, Driver
_logger = logging.getLogger(__name__)
class KeyboardUSBDriver(Driver):
connection_type = 'usb'
def __init__(self, device):
super(KeyboardUSBDriver, self).__init__(device)
self._device_type = 'device'
self._device_connection = 'direct'
self._device_name = self._get_name()
@classmethod
def supported(cls, device):
for cfg in device:
for itf in cfg:
if itf.bInterfaceClass == 3 and itf.bInterfaceProtocol == 1:
return True
return False
def _get_name(self):
try:
manufacturer = util.get_string(self.dev, 256, self.dev.iManufacturer)
product = util.get_string(self.dev, 256, self.dev.iProduct)
return ("%s - %s") % (manufacturer, product)
except ValueError as e:
_logger.warning(e)
            return _('Unknown keyboard')
def action(self, data):
self.data['value'] = ''
event_manager.device_changed(self)
def run(self):
devices = [evdev.InputDevice(path) for path in evdev.list_devices()]
for device in devices:
if (self.dev.idVendor == device.info.vendor) and (self.dev.idProduct == device.info.product):
path = device.path
device = evdev.InputDevice(path)
try:
for event in device.read_loop():
if event.type == evdev.ecodes.EV_KEY:
data = evdev.categorize(event)
if data.keystate:
self.data['value'] = data.keycode.replace('KEY_','')
event_manager.device_changed(self)
except Exception as err:
_logger.warning(err)
|
<commit_before><commit_msg>[IMP] hw_drivers: Move Keyboard driver to Community
The KeyboardUSBDriver will be transformed to integrate barcode
scanners. The barcode scanners will be used in Community so we move the
driver in Community.
TaskID: 1961025<commit_after>
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import evdev
import logging
from usb import util
from odoo import _
from odoo.addons.hw_drivers.controllers.driver import event_manager, Driver
_logger = logging.getLogger(__name__)
class KeyboardUSBDriver(Driver):
connection_type = 'usb'
def __init__(self, device):
super(KeyboardUSBDriver, self).__init__(device)
self._device_type = 'device'
self._device_connection = 'direct'
self._device_name = self._get_name()
@classmethod
def supported(cls, device):
for cfg in device:
for itf in cfg:
if itf.bInterfaceClass == 3 and itf.bInterfaceProtocol == 1:
return True
return False
def _get_name(self):
try:
manufacturer = util.get_string(self.dev, 256, self.dev.iManufacturer)
product = util.get_string(self.dev, 256, self.dev.iProduct)
return ("%s - %s") % (manufacturer, product)
except ValueError as e:
_logger.warning(e)
            return _('Unknown keyboard')
def action(self, data):
self.data['value'] = ''
event_manager.device_changed(self)
def run(self):
devices = [evdev.InputDevice(path) for path in evdev.list_devices()]
for device in devices:
if (self.dev.idVendor == device.info.vendor) and (self.dev.idProduct == device.info.product):
path = device.path
device = evdev.InputDevice(path)
try:
for event in device.read_loop():
if event.type == evdev.ecodes.EV_KEY:
data = evdev.categorize(event)
if data.keystate:
self.data['value'] = data.keycode.replace('KEY_','')
event_manager.device_changed(self)
except Exception as err:
_logger.warning(err)
|
[IMP] hw_drivers: Move Keyboard driver to Community
The KeyboardUSBDriver will be transformed to integrate barcode
scanners. The barcode scanners will be used in Community so we move the
driver in Community.
TaskID: 1961025# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import evdev
import logging
from usb import util
from odoo import _
from odoo.addons.hw_drivers.controllers.driver import event_manager, Driver
_logger = logging.getLogger(__name__)
class KeyboardUSBDriver(Driver):
connection_type = 'usb'
def __init__(self, device):
super(KeyboardUSBDriver, self).__init__(device)
self._device_type = 'device'
self._device_connection = 'direct'
self._device_name = self._get_name()
@classmethod
def supported(cls, device):
for cfg in device:
for itf in cfg:
if itf.bInterfaceClass == 3 and itf.bInterfaceProtocol == 1:
return True
return False
def _get_name(self):
try:
manufacturer = util.get_string(self.dev, 256, self.dev.iManufacturer)
product = util.get_string(self.dev, 256, self.dev.iProduct)
return ("%s - %s") % (manufacturer, product)
except ValueError as e:
_logger.warning(e)
            return _('Unknown keyboard')
def action(self, data):
self.data['value'] = ''
event_manager.device_changed(self)
def run(self):
devices = [evdev.InputDevice(path) for path in evdev.list_devices()]
for device in devices:
if (self.dev.idVendor == device.info.vendor) and (self.dev.idProduct == device.info.product):
path = device.path
device = evdev.InputDevice(path)
try:
for event in device.read_loop():
if event.type == evdev.ecodes.EV_KEY:
data = evdev.categorize(event)
if data.keystate:
self.data['value'] = data.keycode.replace('KEY_','')
event_manager.device_changed(self)
except Exception as err:
_logger.warning(err)
|
<commit_before><commit_msg>[IMP] hw_drivers: Move Keyboard driver to Community
The KeyboardUSBDriver will be transformed to integrate barcode
scanners. The barcode scanners will be used in Community so we move the
driver in Community.
TaskID: 1961025<commit_after># -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import evdev
import logging
from usb import util
from odoo import _
from odoo.addons.hw_drivers.controllers.driver import event_manager, Driver
_logger = logging.getLogger(__name__)
class KeyboardUSBDriver(Driver):
connection_type = 'usb'
def __init__(self, device):
super(KeyboardUSBDriver, self).__init__(device)
self._device_type = 'device'
self._device_connection = 'direct'
self._device_name = self._get_name()
@classmethod
def supported(cls, device):
for cfg in device:
for itf in cfg:
if itf.bInterfaceClass == 3 and itf.bInterfaceProtocol == 1:
return True
return False
def _get_name(self):
try:
manufacturer = util.get_string(self.dev, 256, self.dev.iManufacturer)
product = util.get_string(self.dev, 256, self.dev.iProduct)
return ("%s - %s") % (manufacturer, product)
except ValueError as e:
_logger.warning(e)
            return _('Unknown keyboard')
def action(self, data):
self.data['value'] = ''
event_manager.device_changed(self)
def run(self):
devices = [evdev.InputDevice(path) for path in evdev.list_devices()]
for device in devices:
if (self.dev.idVendor == device.info.vendor) and (self.dev.idProduct == device.info.product):
path = device.path
device = evdev.InputDevice(path)
try:
for event in device.read_loop():
if event.type == evdev.ecodes.EV_KEY:
data = evdev.categorize(event)
if data.keystate:
self.data['value'] = data.keycode.replace('KEY_','')
event_manager.device_changed(self)
except Exception as err:
_logger.warning(err)
|
|
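The supported() check in the record above encodes a USB HID convention: interface class 3 means HID, and boot interface protocol 1 means a keyboard. A library-free sketch of the same test over plain descriptor dicts (the descriptor values are illustrative):

HID_CLASS = 3
KEYBOARD_BOOT_PROTOCOL = 1

def looks_like_keyboard(interfaces):
    # True if any interface descriptor matches the HID keyboard profile.
    return any(itf['bInterfaceClass'] == HID_CLASS and
               itf['bInterfaceProtocol'] == KEYBOARD_BOOT_PROTOCOL
               for itf in interfaces)

assert looks_like_keyboard([{'bInterfaceClass': 3, 'bInterfaceProtocol': 1}])
assert not looks_like_keyboard([{'bInterfaceClass': 9, 'bInterfaceProtocol': 0}])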
8a58e8052c2873181278100de575a24392fe0299
|
CodeFights/depositProfit.py
|
CodeFights/depositProfit.py
|
#!/usr/local/bin/python
# Code Fights Deposit Profit Problem
def depositProfit(deposit, rate, threshold):
years = 0
while deposit < threshold:
deposit *= 1 + rate / 100
years += 1
return years
def main():
tests = [
[100, 20, 170, 3],
[100, 1, 101, 1],
[1, 100, 64, 6]
]
for t in tests:
res = depositProfit(t[0], t[1], t[2])
if t[3] == res:
print("PASSED: depositProfit({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print("FAILED: depositProfit({}, {}, {}) returned {}, answer: {}"
.format(t[0], t[1], t[2], res, t[3]))
if __name__ == '__main__':
main()
|
Solve Code Fights deposit profit problem
|
Solve Code Fights deposit profit problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights deposit profit problem
|
#!/usr/local/bin/python
# Code Fights Deposit Profit Problem
def depositProfit(deposit, rate, threshold):
years = 0
while deposit < threshold:
deposit *= 1 + rate / 100
years += 1
return years
def main():
tests = [
[100, 20, 170, 3],
[100, 1, 101, 1],
[1, 100, 64, 6]
]
for t in tests:
res = depositProfit(t[0], t[1], t[2])
if t[3] == res:
print("PASSED: depositProfit({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print("FAILED: depositProfit({}, {}, {}) returned {}, answer: {}"
.format(t[0], t[1], t[2], res, t[3]))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights deposit profit problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Deposit Profit Problem
def depositProfit(deposit, rate, threshold):
years = 0
while deposit < threshold:
deposit *= 1 + rate / 100
years += 1
return years
def main():
tests = [
[100, 20, 170, 3],
[100, 1, 101, 1],
[1, 100, 64, 6]
]
for t in tests:
res = depositProfit(t[0], t[1], t[2])
if t[3] == res:
print("PASSED: depositProfit({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print("FAILED: depositProfit({}, {}, {}) returned {}, answer: {}"
.format(t[0], t[1], t[2], res, t[3]))
if __name__ == '__main__':
main()
|
Solve Code Fights deposit profit problem#!/usr/local/bin/python
# Code Fights Deposit Profit Problem
def depositProfit(deposit, rate, threshold):
years = 0
while deposit < threshold:
deposit *= 1 + rate / 100
years += 1
return years
def main():
tests = [
[100, 20, 170, 3],
[100, 1, 101, 1],
[1, 100, 64, 6]
]
for t in tests:
res = depositProfit(t[0], t[1], t[2])
if t[3] == res:
print("PASSED: depositProfit({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print("FAILED: depositProfit({}, {}, {}) returned {}, answer: {}"
.format(t[0], t[1], t[2], res, t[3]))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights deposit profit problem<commit_after>#!/usr/local/bin/python
# Code Fights Deposit Profit Problem
def depositProfit(deposit, rate, threshold):
years = 0
while deposit < threshold:
deposit *= 1 + rate / 100
years += 1
return years
def main():
tests = [
[100, 20, 170, 3],
[100, 1, 101, 1],
[1, 100, 64, 6]
]
for t in tests:
res = depositProfit(t[0], t[1], t[2])
if t[3] == res:
print("PASSED: depositProfit({}, {}, {}) returned {}"
.format(t[0], t[1], t[2], res))
else:
print("FAILED: depositProfit({}, {}, {}) returned {}, answer: {}"
.format(t[0], t[1], t[2], res, t[3]))
if __name__ == '__main__':
main()
|
|
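Editorial note on the depositProfit record above: the loop compounds the deposit once per year until it meets the threshold, i.e. it finds the smallest integer y with deposit * (1 + rate/100)**y >= threshold. A closed-form count via logarithms is sketched below; the function name is ours, it is not part of the commit, and floating-point rounding can put the log form off by one near exact thresholds, so the loop remains the safer reference.
import math
def deposit_profit_closed_form(deposit, rate, threshold):
    # Smallest integer y with deposit * (1 + rate / 100) ** y >= threshold.
    if deposit >= threshold:
        return 0
    growth = 1 + rate / 100
    return math.ceil(math.log(threshold / deposit, growth))
# Matches the record's tests, e.g. deposit_profit_closed_form(1, 100, 64) == 6.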
b8b1347ef623507f0a8bf6753535d0b3bad217bb
|
pombola/south_africa/management/commands/south_africa_export_na_members.py
|
pombola/south_africa/management/commands/south_africa_export_na_members.py
|
"""Export a CSV listing National Assembly members with term dates."""
import unicodecsv as csv
import os
import collections
from pombola.core.models import Person, Organisation
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
args = 'destination'
help = 'Export a CSV listing National Assembly members with term dates.'
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("You must provide a destination.")
destination = args[0]
organisation = Organisation.objects.filter(slug='national-assembly').get()
fields = [
'name',
'title',
'given_name',
'family_name',
'url',
'start_date',
'end_date',
]
with open(os.path.join(destination), 'wb') as output_file:
writer = csv.DictWriter(output_file, fieldnames=fields)
writer.writeheader()
# Get the list of positions
positions = organisation.position_set.filter(person__hidden=False)
# Write all the outputs
for position in positions:
print position
person = position.person
position_output = {
'name': person.name,
'title': person.title,
'given_name': person.given_name,
'family_name': person.family_name,
'url': 'https://www.pa.org.za/person/{}/'.format(person.slug),
'start_date': position.start_date,
'end_date': position.end_date,
}
writer.writerow(position_output)
print "Done! Exported CSV of " + str(len(positions)) + " positions."
|
Add script to export NA members and their position start/end dates
|
Add script to export NA members and their position start/end dates
|
Python
|
agpl-3.0
|
mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola
|
Add script to export NA members and their position start/end dates
|
"""Export a CSV listing National Assembly members with term dates."""
import unicodecsv as csv
import os
import collections
from pombola.core.models import Person, Organisation
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
args = 'destination'
help = 'Export a CSV listing National Assembly members with term dates.'
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("You must provide a destination.")
destination = args[0]
organisation = Organisation.objects.filter(slug='national-assembly').get()
fields = [
'name',
'title',
'given_name',
'family_name',
'url',
'start_date',
'end_date',
]
with open(os.path.join(destination), 'wb') as output_file:
writer = csv.DictWriter(output_file, fieldnames=fields)
writer.writeheader()
# Get the list of positions
positions = organisation.position_set.filter(person__hidden=False)
# Write all the outputs
for position in positions:
print position
person = position.person
position_output = {
'name': person.name,
'title': person.title,
'given_name': person.given_name,
'family_name': person.family_name,
'url': 'https://www.pa.org.za/person/{}/'.format(person.slug),
'start_date': position.start_date,
'end_date': position.end_date,
}
writer.writerow(position_output)
print "Done! Exported CSV of " + str(len(positions)) + " positions."
|
<commit_before><commit_msg>Add script to export NA members and their position start/end dates<commit_after>
|
"""Export a CSV listing National Assembly members with term dates."""
import unicodecsv as csv
import os
import collections
from pombola.core.models import Person, Organisation
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
args = 'destination'
help = 'Export a CSV listing National Assembly members with term dates.'
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("You must provide a destination.")
destination = args[0]
organisation = Organisation.objects.filter(slug='national-assembly').get()
fields = [
'name',
'title',
'given_name',
'family_name',
'url',
'start_date',
'end_date',
]
with open(os.path.join(destination), 'wb') as output_file:
writer = csv.DictWriter(output_file, fieldnames=fields)
writer.writeheader()
# Get the list of positions
positions = organisation.position_set.filter(person__hidden=False)
# Write all the outputs
for position in positions:
print position
person = position.person
position_output = {
'name': person.name,
'title': person.title,
'given_name': person.given_name,
'family_name': person.family_name,
'url': 'https://www.pa.org.za/person/{}/'.format(person.slug),
'start_date': position.start_date,
'end_date': position.end_date,
}
writer.writerow(position_output)
print "Done! Exported CSV of " + str(len(positions)) + " positions."
|
Add script to export NA members and their position start/end dates"""Export a CSV listing National Assembly members with term dates."""
import unicodecsv as csv
import os
import collections
from pombola.core.models import Person, Organisation
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
args = 'destination'
help = 'Export a CSV listing National Assembly members with term dates.'
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("You must provide a destination.")
destination = args[0]
organisation = Organisation.objects.filter(slug='national-assembly').get()
fields = [
'name',
'title',
'given_name',
'family_name',
'url',
'start_date',
'end_date',
]
with open(os.path.join(destination), 'wb') as output_file:
writer = csv.DictWriter(output_file, fieldnames=fields)
writer.writeheader()
# Get the list of positions
positions = organisation.position_set.filter(person__hidden=False)
# Write all the outputs
for position in positions:
print position
person = position.person
position_output = {
'name': person.name,
'title': person.title,
'given_name': person.given_name,
'family_name': person.family_name,
'url': 'https://www.pa.org.za/person/{}/'.format(person.slug),
'start_date': position.start_date,
'end_date': position.end_date,
}
writer.writerow(position_output)
print "Done! Exported CSV of " + str(len(positions)) + " positions."
|
<commit_before><commit_msg>Add script to export NA members and their position start/end dates<commit_after>"""Export a CSV listing National Assembly members with term dates."""
import unicodecsv as csv
import os
import collections
from pombola.core.models import Person, Organisation
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
args = 'destination'
help = 'Export a CSV listing National Assembly members with term dates.'
def handle(self, *args, **options):
if len(args) != 1:
raise CommandError("You must provide a destination.")
destination = args[0]
organisation = Organisation.objects.filter(slug='national-assembly').get()
fields = [
'name',
'title',
'given_name',
'family_name',
'url',
'start_date',
'end_date',
]
with open(os.path.join(destination), 'wb') as output_file:
writer = csv.DictWriter(output_file, fieldnames=fields)
writer.writeheader()
# Get the list of positions
positions = organisation.position_set.filter(person__hidden=False)
# Write all the outputs
for position in positions:
print position
person = position.person
position_output = {
'name': person.name,
'title': person.title,
'given_name': person.given_name,
'family_name': person.family_name,
'url': 'https://www.pa.org.za/person/{}/'.format(person.slug),
'start_date': position.start_date,
'end_date': position.end_date,
}
writer.writerow(position_output)
print "Done! Exported CSV of " + str(len(positions)) + " positions."
|
|
0d9656bf5031f3a4d393bf81f8322909ba48110b
|
radio/migrations/0026_auto_20170305_1336.py
|
radio/migrations/0026_auto_20170305_1336.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2017-03-05 21:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('radio', '0025_unit_slug'),
]
operations = [
migrations.AlterField(
model_name='talkgroup',
name='dec_id',
field=models.IntegerField(),
),
]
|
Add DB migration to remove unique on TalkGroup dec_id
|
Add DB migration to remove unique on TalkGroup dec_id
|
Python
|
mit
|
ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player,ScanOC/trunk-player
|
Add DB migration to remove unique on TalkGroup dec_id
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2017-03-05 21:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('radio', '0025_unit_slug'),
]
operations = [
migrations.AlterField(
model_name='talkgroup',
name='dec_id',
field=models.IntegerField(),
),
]
|
<commit_before><commit_msg>Add DB migration to remove unique on TalkGroup dec_id<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2017-03-05 21:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('radio', '0025_unit_slug'),
]
operations = [
migrations.AlterField(
model_name='talkgroup',
name='dec_id',
field=models.IntegerField(),
),
]
|
Add DB migration to remove unique on TalkGroup dec_id# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2017-03-05 21:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('radio', '0025_unit_slug'),
]
operations = [
migrations.AlterField(
model_name='talkgroup',
name='dec_id',
field=models.IntegerField(),
),
]
|
<commit_before><commit_msg>Add DB migration to remove unique on TalkGroup dec_id<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2017-03-05 21:36
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('radio', '0025_unit_slug'),
]
operations = [
migrations.AlterField(
model_name='talkgroup',
name='dec_id',
field=models.IntegerField(),
),
]
|
|
696c9f81590eff6a127eed91257517cb0e37c81c
|
CodeFights/primeSum.py
|
CodeFights/primeSum.py
|
#!/usr/local/bin/python
# Code Fights Prime Sum Problem
def primeSum(a, b):
return sum(filter(lambda p: p > 1 and all(p % n for n in
range(2, int(p**0.5) + 1)), range(a, b + 1)))
def main():
tests = [
[10, 20, 60],
[1, 7, 17],
[5, 10, 12],
[24, 28, 0],
[13, 13, 13]
]
for t in tests:
res = primeSum(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: primeSum({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: primeSum({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights prime sum problem
|
Solve Code Fights prime sum problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights prime sum problem
|
#!/usr/local/bin/python
# Code Fights Prime Sum Problem
def primeSum(a, b):
return sum(filter(lambda p: p > 1 and all(p % n for n in
range(2, int(p**0.5) + 1)), range(a, b + 1)))
def main():
tests = [
[10, 20, 60],
[1, 7, 17],
[5, 10, 12],
[24, 28, 0],
[13, 13, 13]
]
for t in tests:
res = primeSum(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: primeSum({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: primeSum({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights prime sum problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Prime Sum Problem
def primeSum(a, b):
return sum(filter(lambda p: p > 1 and all(p % n for n in
range(2, int(p**0.5) + 1)), range(a, b + 1)))
def main():
tests = [
[10, 20, 60],
[1, 7, 17],
[5, 10, 12],
[24, 28, 0],
[13, 13, 13]
]
for t in tests:
res = primeSum(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: primeSum({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: primeSum({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights prime sum problem#!/usr/local/bin/python
# Code Fights Prime Sum Problem
def primeSum(a, b):
return sum(filter(lambda p: p > 1 and all(p % n for n in
range(2, int(p**0.5) + 1)), range(a, b + 1)))
def main():
tests = [
[10, 20, 60],
[1, 7, 17],
[5, 10, 12],
[24, 28, 0],
[13, 13, 13]
]
for t in tests:
res = primeSum(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: primeSum({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: primeSum({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights prime sum problem<commit_after>#!/usr/local/bin/python
# Code Fights Prime Sum Problem
def primeSum(a, b):
return sum(filter(lambda p: p > 1 and all(p % n for n in
range(2, int(p**0.5) + 1)), range(a, b + 1)))
def main():
tests = [
[10, 20, 60],
[1, 7, 17],
[5, 10, 12],
[24, 28, 0],
[13, 13, 13]
]
for t in tests:
res = primeSum(t[0], t[1])
ans = t[2]
if ans == res:
print("PASSED: primeSum({}, {}) returned {}"
.format(t[0], t[1], res))
else:
print("FAILED: primeSum({}, {}) returned {}, answer: {}"
.format(t[0], t[1], res, ans))
if __name__ == '__main__':
main()
|
|
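Editorial note on the primeSum record above: the lambda keeps p when p > 1 and all(p % n ...) holds for n from 2 up to floor(sqrt(p)), i.e. plain trial division. A readable expansion under that reading (names are ours, a sketch rather than the committed code):
def is_prime(p):
    # Trial division: p is prime iff p > 1 and no n in 2..floor(sqrt(p)) divides it.
    if p <= 1:
        return False
    n = 2
    while n * n <= p:
        if p % n == 0:
            return False
        n += 1
    return True
def prime_sum(a, b):
    return sum(p for p in range(a, b + 1) if is_prime(p))
# prime_sum(10, 20) == 11 + 13 + 17 + 19 == 60, matching the first test.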
33282c65743c86cbad38160b801e7155ab16c60f
|
tests/data_checks/test_gwas_catalog_coverage.py
|
tests/data_checks/test_gwas_catalog_coverage.py
|
# ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestGWASCatalogCoverage(TestPostgapBase):
def test_each_gwas_efo_covered(self):
self.skipTest('EACH GWAS EFO ID COVERED IN POSTGAP OUTPUT')
def test_each_gwas_snp_covered(self):
self.skipTest('EACH GWAS SNP ID COVERED IN POSTGAP OUTPUT')
if __name__ == '__main__':
unittest.main()
|
Add placeholder for gwas catalog coverage
|
Add placeholder for gwas catalog coverage
|
Python
|
apache-2.0
|
Ensembl/cttv024,Ensembl/cttv024
|
Add placeholder for gwas catalog coverage
|
# ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestGWASCatalogCoverage(TestPostgapBase):
def test_each_gwas_efo_covered(self):
self.skipTest('EACH GWAS EFO ID COVERED IN POSTGAP OUTPUT')
def test_each_gwas_snp_covered(self):
self.skipTest('EACH GWAS SNP ID COVERED IN POSTGAP OUTPUT')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add placeholder for gwas catalog coverage<commit_after>
|
# ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestGWASCatalogCoverage(TestPostgapBase):
def test_each_gwas_efo_covered(self):
self.skipTest('EACH GWAS EFO ID COVERED IN POSTGAP OUTPUT')
def test_each_gwas_snp_covered(self):
self.skipTest('EACH GWAS SNP ID COVERED IN POSTGAP OUTPUT')
if __name__ == '__main__':
unittest.main()
|
Add placeholder for gwas catalog coverage# ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestGWASCatalogCoverage(TestPostgapBase):
def test_each_gwas_efo_covered(self):
self.skipTest('EACH GWAS EFO ID COVERED IN POSTGAP OUTPUT')
def test_each_gwas_snp_covered(self):
self.skipTest('EACH GWAS SNP ID COVERED IN POSTGAP OUTPUT')
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add placeholder for gwas catalog coverage<commit_after># ------------------------------------------------
# built-ins
import unittest
# local
from utils.base import TestPostgapBase
# ------------------------------------------------
class TestGWASCatalogCoverage(TestPostgapBase):
def test_each_gwas_efo_covered(self):
self.skipTest('EACH GWAS EFO ID COVERED IN POSTGAP OUTPUT')
def test_each_gwas_snp_covered(self):
self.skipTest('EACH GWAS SNP ID COVERED IN POSTGAP OUTPUT')
if __name__ == '__main__':
unittest.main()
|
|
197c0fc802bd6936790500339fa64cbded18ab46
|
letsmeet/main/templatetags/rich_text.py
|
letsmeet/main/templatetags/rich_text.py
|
import bleach
import markdown
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'table',
'tbody',
'thead',
'tr',
'td',
'th',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
'table': ['width'],
'td': ['width', 'align'],
}
@register.filter
def rich_text(text: str, **kwargs):
"""
Processes markdown and cleans HTML in a text input.
"""
if not text:
return ""
body_md = bleach.linkify(bleach.clean(markdown.markdown(str(text)),
tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES))
return mark_safe(body_md)
|
Add a templatetag to display markdown
|
Add a templatetag to display markdown
|
Python
|
mit
|
letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click,letsmeet-click/letsmeet.click
|
Add a templatetag to display markdown
|
import bleach
import markdown
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'table',
'tbody',
'thead',
'tr',
'td',
'th',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
'table': ['width'],
'td': ['width', 'align'],
}
@register.filter
def rich_text(text: str, **kwargs):
"""
Processes markdown and cleans HTML in a text input.
"""
if not text:
return ""
body_md = bleach.linkify(bleach.clean(markdown.markdown(str(text)),
tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES))
return mark_safe(body_md)
|
<commit_before><commit_msg>Add a templatetag to display markdown<commit_after>
|
import bleach
import markdown
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'table',
'tbody',
'thead',
'tr',
'td',
'th',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
'table': ['width'],
'td': ['width', 'align'],
}
@register.filter
def rich_text(text: str, **kwargs):
"""
Processes markdown and cleans HTML in a text input.
"""
if not text:
return ""
body_md = bleach.linkify(bleach.clean(markdown.markdown(str(text)),
tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES))
return mark_safe(body_md)
|
Add a templatetag to display markdownimport bleach
import markdown
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'table',
'tbody',
'thead',
'tr',
'td',
'th',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
'table': ['width'],
'td': ['width', 'align'],
}
@register.filter
def rich_text(text: str, **kwargs):
"""
Processes markdown and cleans HTML in a text input.
"""
if not text:
return ""
body_md = bleach.linkify(bleach.clean(markdown.markdown(str(text)),
tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES))
return mark_safe(body_md)
|
<commit_before><commit_msg>Add a templatetag to display markdown<commit_after>import bleach
import markdown
from django import template
from django.utils.safestring import mark_safe
register = template.Library()
ALLOWED_TAGS = [
'a',
'abbr',
'acronym',
'b',
'blockquote',
'code',
'em',
'i',
'li',
'ol',
'strong',
'ul',
'p',
'table',
'tbody',
'thead',
'tr',
'td',
'th',
'h1',
'h2',
'h3',
'h4',
'h5',
'h6',
]
ALLOWED_ATTRIBUTES = {
'a': ['href', 'title'],
'abbr': ['title'],
'acronym': ['title'],
'table': ['width'],
'td': ['width', 'align'],
}
@register.filter
def rich_text(text: str, **kwargs):
"""
Processes markdown and cleans HTML in a text input.
"""
if not text:
return ""
body_md = bleach.linkify(bleach.clean(markdown.markdown(str(text)),
tags=ALLOWED_TAGS, attributes=ALLOWED_ATTRIBUTES))
return mark_safe(body_md)
|
|
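Editorial note on the rich_text record above: the filter renders markdown first, then bleach.clean drops any tag or attribute outside the two allow-lists, and bleach.linkify wraps bare URLs in anchors afterwards. A hypothetical template usage, assuming the library file is loadable as rich_text (the variable name is ours):
# {% load rich_text %}
# {{ event.description|rich_text }}
# Ordering matters: cleaning runs on the markdown output, so raw HTML typed
# into the source text is sanitized before linkify adds any <a> elements.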
c9c230c2da15fcef5352a67018f43a445caa5c35
|
zerver/migrations/0409_set_default_for_can_remove_subscribers_group.py
|
zerver/migrations/0409_set_default_for_can_remove_subscribers_group.py
|
# Generated by Django 3.2.13 on 2022-06-28 12:02
from django.db import migrations
from django.db.backends.postgresql.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
def set_default_value_for_can_remove_subscribers_group(
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
Stream = apps.get_model("zerver", "Stream")
Realm = apps.get_model("zerver", "Realm")
UserGroup = apps.get_model("zerver", "UserGroup")
for realm in Realm.objects.all():
admins_group = UserGroup.objects.get(
name="@role:administrators", realm=realm, is_system_group=True
)
Stream.objects.filter(realm=realm, can_remove_subscribers_group=None).update(
can_remove_subscribers_group=admins_group
)
class Migration(migrations.Migration):
dependencies = [
("zerver", "0408_stream_can_remove_subscribers_group"),
]
operations = [
migrations.RunPython(
set_default_value_for_can_remove_subscribers_group,
elidable=True,
reverse_code=migrations.RunPython.noop,
),
]
|
Add migration to set default of can_remove_subscribers_group.
|
migrations: Add migration to set default of can_remove_subscribers_group.
This migration sets can_remove_subscribers_group value to admins system
group for all the existing streams. In further commit we would change
can_remove_subscribers_group to be not null and thus we add this migration
to ensure all existing streams have this setting value set.
|
Python
|
apache-2.0
|
rht/zulip,zulip/zulip,rht/zulip,andersk/zulip,andersk/zulip,zulip/zulip,andersk/zulip,rht/zulip,andersk/zulip,rht/zulip,rht/zulip,rht/zulip,zulip/zulip,zulip/zulip,zulip/zulip,rht/zulip,andersk/zulip,zulip/zulip,zulip/zulip,andersk/zulip,andersk/zulip
|
migrations: Add migration to set default of can_remove_subscribers_group.
This migration sets can_remove_subscribers_group value to admins system
group for all the existing streams. In further commit we would change
can_remove_subscribers_group to be not null and thus we add this migration
to ensure all existing streams have this setting value set.
|
# Generated by Django 3.2.13 on 2022-06-28 12:02
from django.db import migrations
from django.db.backends.postgresql.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
def set_default_value_for_can_remove_subscribers_group(
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
Stream = apps.get_model("zerver", "Stream")
Realm = apps.get_model("zerver", "Realm")
UserGroup = apps.get_model("zerver", "UserGroup")
for realm in Realm.objects.all():
admins_group = UserGroup.objects.get(
name="@role:administrators", realm=realm, is_system_group=True
)
Stream.objects.filter(realm=realm, can_remove_subscribers_group=None).update(
can_remove_subscribers_group=admins_group
)
class Migration(migrations.Migration):
dependencies = [
("zerver", "0408_stream_can_remove_subscribers_group"),
]
operations = [
migrations.RunPython(
set_default_value_for_can_remove_subscribers_group,
elidable=True,
reverse_code=migrations.RunPython.noop,
),
]
|
<commit_before><commit_msg>migrations: Add migration to set default of can_remove_subscribers_group.
This migration sets can_remove_subscribers_group value to admins system
group for all the existing streams. In further commit we would change
can_remove_subscribers_group to be not null and thus we add this migration
to ensure all existing streams have this setting value set.<commit_after>
|
# Generated by Django 3.2.13 on 2022-06-28 12:02
from django.db import migrations
from django.db.backends.postgresql.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
def set_default_value_for_can_remove_subscribers_group(
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
Stream = apps.get_model("zerver", "Stream")
Realm = apps.get_model("zerver", "Realm")
UserGroup = apps.get_model("zerver", "UserGroup")
for realm in Realm.objects.all():
admins_group = UserGroup.objects.get(
name="@role:administrators", realm=realm, is_system_group=True
)
Stream.objects.filter(realm=realm, can_remove_subscribers_group=None).update(
can_remove_subscribers_group=admins_group
)
class Migration(migrations.Migration):
dependencies = [
("zerver", "0408_stream_can_remove_subscribers_group"),
]
operations = [
migrations.RunPython(
set_default_value_for_can_remove_subscribers_group,
elidable=True,
reverse_code=migrations.RunPython.noop,
),
]
|
migrations: Add migration to set default of can_remove_subscribers_group.
This migration sets can_remove_subscribers_group value to admins system
group for all the existing streams. In further commit we would change
can_remove_subscribers_group to be not null and thus we add this migration
to ensure all existing streams have this setting value set.# Generated by Django 3.2.13 on 2022-06-28 12:02
from django.db import migrations
from django.db.backends.postgresql.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
def set_default_value_for_can_remove_subscribers_group(
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
Stream = apps.get_model("zerver", "Stream")
Realm = apps.get_model("zerver", "Realm")
UserGroup = apps.get_model("zerver", "UserGroup")
for realm in Realm.objects.all():
admins_group = UserGroup.objects.get(
name="@role:administrators", realm=realm, is_system_group=True
)
Stream.objects.filter(realm=realm, can_remove_subscribers_group=None).update(
can_remove_subscribers_group=admins_group
)
class Migration(migrations.Migration):
dependencies = [
("zerver", "0408_stream_can_remove_subscribers_group"),
]
operations = [
migrations.RunPython(
set_default_value_for_can_remove_subscribers_group,
elidable=True,
reverse_code=migrations.RunPython.noop,
),
]
|
<commit_before><commit_msg>migrations: Add migration to set default of can_remove_subscribers_group.
This migration sets can_remove_subscribers_group value to admins system
group for all the existing streams. In further commit we would change
can_remove_subscribers_group to be not null and thus we add this migration
to ensure all existing streams have this setting value set.<commit_after># Generated by Django 3.2.13 on 2022-06-28 12:02
from django.db import migrations
from django.db.backends.postgresql.schema import BaseDatabaseSchemaEditor
from django.db.migrations.state import StateApps
def set_default_value_for_can_remove_subscribers_group(
apps: StateApps, schema_editor: BaseDatabaseSchemaEditor
) -> None:
Stream = apps.get_model("zerver", "Stream")
Realm = apps.get_model("zerver", "Realm")
UserGroup = apps.get_model("zerver", "UserGroup")
for realm in Realm.objects.all():
admins_group = UserGroup.objects.get(
name="@role:administrators", realm=realm, is_system_group=True
)
Stream.objects.filter(realm=realm, can_remove_subscribers_group=None).update(
can_remove_subscribers_group=admins_group
)
class Migration(migrations.Migration):
dependencies = [
("zerver", "0408_stream_can_remove_subscribers_group"),
]
operations = [
migrations.RunPython(
set_default_value_for_can_remove_subscribers_group,
elidable=True,
reverse_code=migrations.RunPython.noop,
),
]
|
|
950246b331c74700e01dc48a86f84bf47d528af3
|
var/spack/repos/builtin/packages/hdf/package.py
|
var/spack/repos/builtin/packages/hdf/package.py
|
from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip@2.1")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
|
from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
|
Remove constraint on dependency version
|
Remove constraint on dependency version
|
Python
|
lgpl-2.1
|
TheTimmy/spack,EmreAtes/spack,lgarren/spack,TheTimmy/spack,LLNL/spack,krafczyk/spack,TheTimmy/spack,lgarren/spack,LLNL/spack,LLNL/spack,lgarren/spack,skosukhin/spack,TheTimmy/spack,mfherbst/spack,skosukhin/spack,iulian787/spack,skosukhin/spack,TheTimmy/spack,iulian787/spack,lgarren/spack,skosukhin/spack,matthiasdiener/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,lgarren/spack,skosukhin/spack,krafczyk/spack,iulian787/spack,EmreAtes/spack,matthiasdiener/spack,krafczyk/spack,tmerrick1/spack,matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,matthiasdiener/spack,mfherbst/spack,LLNL/spack,tmerrick1/spack,LLNL/spack,iulian787/spack,EmreAtes/spack,tmerrick1/spack,EmreAtes/spack,tmerrick1/spack,tmerrick1/spack,mfherbst/spack,mfherbst/spack,mfherbst/spack
|
from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip@2.1")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
Remove constraint on dependency version
|
from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
|
<commit_before>from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip@2.1")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
<commit_msg>Remove constraint on dependency version<commit_after>
|
from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
|
from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip@2.1")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
Remove constraint on dependency versionfrom spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
|
<commit_before>from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip@2.1")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
<commit_msg>Remove constraint on dependency version<commit_after>from spack import *
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
homepage = "https://www.hdfgroup.org/products/hdf4/"
url = "https://www.hdfgroup.org/ftp/HDF/releases/HDF4.2.11/src/hdf-4.2.11.tar.gz"
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
depends_on("jpeg")
depends_on("szip")
depends_on("zlib")
def url_for_version(self, version):
return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
'--with-jpeg=%s' % spec['jpeg'].prefix,
'--with-szlib=%s' % spec['szip'].prefix,
'--with-zlib=%s' % spec['zlib'].prefix,
'--disable-netcdf',
'--enable-fortran',
'--disable-shared',
'--enable-static',
'--enable-production')
make()
make("install")
|
3dd7ba37476322a68a5488aab0eabe8f2cf88ec6
|
project/utility/alter.py
|
project/utility/alter.py
|
import jpype
import jpype.imports
jpype.startJVM()
from java.util.jar import JarOutputStream
from java.util.jar import JarInputStream
from java.util.jar import JarFile
from java.util.zip import CRC32
from java.io import File
from java.io import FileInputStream
from java.io import FileOutputStream
jar = JarInputStream(FileInputStream(File("build/lib/org.jpype.jar")))
manifest = jar.getManifest()
target = JarOutputStream(FileOutputStream(File("build/lib/org.jpype2.jar")), manifest)
while 1:
entry=jar.getNextEntry()
if not entry:
break
out = []
l3 = 512
while 1:
bt=jpype.JArray(jpype.JByte)(l3)
l = jar.read(bt, 0, l3)
if l==-1:
break
out.append((l,bt))
if out:
out[0][1][7] = 57
crc = CRC32()
for v in out:
crc.update(v[1],0,v[0])
entry.setCrc(crc.getValue())
entry.setCompressedSize(-1)
target.putNextEntry(entry)
for v in out:
target.write(v[1],0,v[0])
target.closeEntry()
target.close()
|
Remove PyPy 2 as well.
|
Remove PyPy 2 as well.
|
Python
|
apache-2.0
|
originell/jpype,originell/jpype,originell/jpype,originell/jpype,originell/jpype
|
Remove PyPy 2 as well.
|
import jpype
import jpype.imports
jpype.startJVM()
from java.util.jar import JarOutputStream
from java.util.jar import JarInputStream
from java.util.jar import JarFile
from java.util.zip import CRC32
from java.io import File
from java.io import FileInputStream
from java.io import FileOutputStream
jar = JarInputStream(FileInputStream(File("build/lib/org.jpype.jar")))
manifest = jar.getManifest()
target = JarOutputStream(FileOutputStream(File("build/lib/org.jpype2.jar")), manifest)
while 1:
entry=jar.getNextEntry()
if not entry:
break
out = []
l3 = 512
while 1:
bt=jpype.JArray(jpype.JByte)(l3)
l = jar.read(bt, 0, l3)
if l==-1:
break
out.append((l,bt))
if out:
out[0][1][7] = 57
crc = CRC32()
for v in out:
crc.update(v[1],0,v[0])
entry.setCrc(crc.getValue())
entry.setCompressedSize(-1)
target.putNextEntry(entry)
for v in out:
target.write(v[1],0,v[0])
target.closeEntry()
target.close()
|
<commit_before><commit_msg>Remove PyPy 2 as well.<commit_after>
|
import jpype
import jpype.imports
jpype.startJVM()
from java.util.jar import JarOutputStream
from java.util.jar import JarInputStream
from java.util.jar import JarFile
from java.util.zip import CRC32
from java.io import File
from java.io import FileInputStream
from java.io import FileOutputStream
jar = JarInputStream(FileInputStream(File("build/lib/org.jpype.jar")))
manifest = jar.getManifest()
target = JarOutputStream(FileOutputStream(File("build/lib/org.jpype2.jar")), manifest)
while 1:
entry=jar.getNextEntry()
if not entry:
break
out = []
l3 = 512
while 1:
bt=jpype.JArray(jpype.JByte)(l3)
l = jar.read(bt, 0, l3)
if l==-1:
break
out.append((l,bt))
if out:
out[0][1][7] = 57
crc = CRC32()
for v in out:
crc.update(v[1],0,v[0])
entry.setCrc(crc.getValue())
entry.setCompressedSize(-1)
target.putNextEntry(entry)
for v in out:
target.write(v[1],0,v[0])
target.closeEntry()
target.close()
|
Remove PyPy 2 as well.import jpype
import jpype.imports
jpype.startJVM()
from java.util.jar import JarOutputStream
from java.util.jar import JarInputStream
from java.util.jar import JarFile
from java.util.zip import CRC32
from java.io import File
from java.io import FileInputStream
from java.io import FileOutputStream
jar = JarInputStream(FileInputStream(File("build/lib/org.jpype.jar")))
manifest = jar.getManifest()
target = JarOutputStream(FileOutputStream(File("build/lib/org.jpype2.jar")), manifest)
while 1:
entry=jar.getNextEntry()
if not entry:
break
out = []
l3 = 512
while 1:
bt=jpype.JArray(jpype.JByte)(l3)
l = jar.read(bt, 0, l3)
if l==-1:
break
out.append((l,bt))
if out:
out[0][1][7] = 57
crc = CRC32()
for v in out:
crc.update(v[1],0,v[0])
entry.setCrc(crc.getValue())
entry.setCompressedSize(-1)
target.putNextEntry(entry)
for v in out:
target.write(v[1],0,v[0])
target.closeEntry()
target.close()
|
<commit_before><commit_msg>Remove PyPy 2 as well.<commit_after>import jpype
import jpype.imports
jpype.startJVM()
from java.util.jar import JarOutputStream
from java.util.jar import JarInputStream
from java.util.jar import JarFile
from java.util.zip import CRC32
from java.io import File
from java.io import FileInputStream
from java.io import FileOutputStream
jar = JarInputStream(FileInputStream(File("build/lib/org.jpype.jar")))
manifest = jar.getManifest()
target = JarOutputStream(FileOutputStream(File("build/lib/org.jpype2.jar")), manifest)
while 1:
entry=jar.getNextEntry()
if not entry:
break
out = []
l3 = 512
while 1:
bt=jpype.JArray(jpype.JByte)(l3)
l = jar.read(bt, 0, l3)
if l==-1:
break
out.append((l,bt))
if out:
out[0][1][7] = 57
crc = CRC32()
for v in out:
crc.update(v[1],0,v[0])
entry.setCrc(crc.getValue())
entry.setCompressedSize(-1)
target.putNextEntry(entry)
for v in out:
target.write(v[1],0,v[0])
target.closeEntry()
target.close()
|
|
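Editorial note on the alter.py record above: out[0][1][7] = 57 patches byte 7 of each buffered jar entry. In a Java class file, bytes 0-3 are the 0xCAFEBABE magic, bytes 4-5 the minor version, and bytes 6-7 the big-endian major version, so this stamps major version 57 (the Java 13 class-file format); the CRC must then be recomputed, which is why the script buffers every entry before writing it. A minimal standalone sketch of the same header edit on one .class file (the path in the usage comment is hypothetical):
def set_class_file_version(path, major=57):
    # Bytes 0-3: 0xCAFEBABE magic; 4-5: minor version; 6-7: major version.
    with open(path, "r+b") as f:
        header = bytearray(f.read(8))
        assert header[:4] == b"\xca\xfe\xba\xbe", "not a Java class file"
        header[6] = (major >> 8) & 0xFF
        header[7] = major & 0xFF
        f.seek(0)
        f.write(header)
# set_class_file_version("org/jpype/Example.class")  # hypothetical path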
c98c9f11d886885110bf6a832595f5a814fa65b9
|
py/reshape-the-matrix.py
|
py/reshape-the-matrix.py
|
from operator import add
class Solution(object):
def matrixReshape(self, nums, r, c):
"""
:type nums: List[List[int]]
:type r: int
:type c: int
:rtype: List[List[int]]
"""
if len(nums) == 0:
return nums
origR = len(nums)
origC = len(nums[0])
if origR * origC != r * c:
return nums
oneline = reduce(add, nums)
return map(list, zip(*([iter(oneline)] * c)))
|
Add py solution for 566. Reshape the Matrix
|
Add py solution for 566. Reshape the Matrix
566. Reshape the Matrix: https://leetcode.com/problems/reshape-the-matrix/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 566. Reshape the Matrix
566. Reshape the Matrix: https://leetcode.com/problems/reshape-the-matrix/
|
from operator import add
class Solution(object):
def matrixReshape(self, nums, r, c):
"""
:type nums: List[List[int]]
:type r: int
:type c: int
:rtype: List[List[int]]
"""
if len(nums) == 0:
return nums
origR = len(nums)
origC = len(nums[0])
if origR * origC != r * c:
return nums
oneline = reduce(add, nums)
return map(list, zip(*([iter(oneline)] * c)))
|
<commit_before><commit_msg>Add py solution for 566. Reshape the Matrix
566. Reshape the Matrix: https://leetcode.com/problems/reshape-the-matrix/<commit_after>
|
from operator import add
class Solution(object):
def matrixReshape(self, nums, r, c):
"""
:type nums: List[List[int]]
:type r: int
:type c: int
:rtype: List[List[int]]
"""
if len(nums) == 0:
return nums
origR = len(nums)
origC = len(nums[0])
if origR * origC != r * c:
return nums
oneline = reduce(add, nums)
return map(list, zip(*([iter(oneline)] * c)))
|
Add py solution for 566. Reshape the Matrix
566. Reshape the Matrix: https://leetcode.com/problems/reshape-the-matrix/from operator import add
class Solution(object):
def matrixReshape(self, nums, r, c):
"""
:type nums: List[List[int]]
:type r: int
:type c: int
:rtype: List[List[int]]
"""
if len(nums) == 0:
return nums
origR = len(nums)
origC = len(nums[0])
if origR * origC != r * c:
return nums
oneline = reduce(add, nums)
return map(list, zip(*([iter(oneline)] * c)))
|
<commit_before><commit_msg>Add py solution for 566. Reshape the Matrix
566. Reshape the Matrix: https://leetcode.com/problems/reshape-the-matrix/<commit_after>from operator import add
class Solution(object):
def matrixReshape(self, nums, r, c):
"""
:type nums: List[List[int]]
:type r: int
:type c: int
:rtype: List[List[int]]
"""
if len(nums) == 0:
return nums
origR = len(nums)
origC = len(nums[0])
if origR * origC != r * c:
return nums
oneline = reduce(add, nums)
return map(list, zip(*([iter(oneline)] * c)))
|
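Editorial note on the reshape record above: zip(*([iter(oneline)] * c)) chunks the flattened list into rows of length c by giving zip c references to a single shared iterator, so each output tuple consumes c consecutive items. The committed code also relies on the Python 2 built-in reduce; a Python 3 equivalent of the whole routine (a sketch, not the committed code):
from functools import reduce
from operator import add
def matrix_reshape(nums, r, c):
    if not nums or len(nums) * len(nums[0]) != r * c:
        return nums
    flat = reduce(add, nums)                 # concatenate the row lists
    it = iter(flat)
    return [list(row) for row in zip(*([it] * c))]  # r rows of c items each
# matrix_reshape([[1, 2], [3, 4]], 1, 4) == [[1, 2, 3, 4]]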