| column | dtype | lengths / classes |
|---|---|---|
| commit | string | 40–40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | class | 1 value |
| license | class | 13 values |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |
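Each record below lists the raw commit fields (`commit` through `repos`) as `|`-separated cells; the remaining columns are derived from them: `prompt`/`response` repeat the commit message and file contents, and the `*_tagged` variants wrap them in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers. As a minimal sketch of working with this schema (assuming the data is published on the Hugging Face Hub; the dataset identifier below is a placeholder, not a real name), the tagged text can be rebuilt from the raw columns:

```python
# Minimal sketch: load a dataset with the schema above and rebuild the
# derived `text_tagged` column. "someuser/python-commits" is a placeholder
# identifier, not a real Hub dataset name.
from datasets import load_dataset

ds = load_dataset("someuser/python-commits", split="train")
row = ds[0]
text_tagged = "<commit_before>{}<commit_msg>{}<commit_after>{}".format(
    row["old_contents"], row["message"], row["new_contents"])
print(text_tagged[:120])
```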
ca440877fd1b28157393c480d6fed8f5dd9e4238
|
run_experiment.py
|
run_experiment.py
|
#!/usr/bin/env python

if __name__ == '__main__':
    import argparse
    from glob import glob
    import os.path

    from sklearn.feature_extraction.text import CountVectorizer
    from termcolor import cprint

    import method
    import util

    parser = argparse.ArgumentParser(description='Run experiment '
                                     'with given settings')
    parser.add_argument('normal_thread', type=str,
                        help='Normal thread directory')
    parser.add_argument('oot_thread', type=str,
                        help='Thread directory from which '
                             'OOT post will be taken')
    parser.add_argument('-m', type=int, default=None,
                        help='Number of posts taken from normal_thread')
    parser.add_argument('-n', type=int, default=None,
                        help='Number of posts taken from oot_thread')
    parser.add_argument('--method', type=str, default=['clust_dist'],
                        nargs='*',
                        choices=['clust_dist', 'mean_comp', 'txt_comp_dist'],
                        help='OOT post detection method to use')
    args = parser.parse_args()

    # Obtain normal posts
    normfiles = util.pick(glob(os.path.join(args.normal_thread, '*')),
                          k=args.m, randomized=False)
    print('Obtaining', len(normfiles), 'normal posts')
    for file in normfiles:
        cprint(file, 'green')

    # Obtain OOT posts
    ootfiles = util.pick(glob(os.path.join(args.oot_thread, '*')), k=args.n)
    print('Obtaining', len(ootfiles), 'OOT posts:')
    for file in ootfiles:
        cprint(file, 'red')

    # Combine them both
    files = normfiles + ootfiles
    truth = [False]*len(normfiles) + [True]*len(ootfiles)

    # Apply OOT post detection methods
    for meth in args.method:
        print('\nApplying', meth, 'method...', end=' ')
        if meth == 'txt_comp_dist':
            pass  # txt_comp_dist has no vectorized implementation yet
        else:
            vectorizer = CountVectorizer(input='filename',
                                         stop_words='english')
            X = vectorizer.fit_transform(files).toarray()
            methodfunc = getattr(method, meth)
            res = methodfunc(X)
        print('OK')

        # Construct ranked list of OOT posts (1: most off-topic)
        ranked = reversed(sorted(zip(files, res, truth), key=lambda x: x[1]))

        # Print result
        for i, (file, score, t) in enumerate(ranked):
            txt = '#{:02} {} -> {}'.format(i+1, file, score)
            cprint(txt, 'red' if t else 'green')
|
Add script to run experiment
|
Add script to run experiment
|
Python
|
mit
|
kemskems/otdet
|
|
eb4221e6f0b48b131f2009168b3b22f5a3d4dd8f
|
neutron/extensions/port_resource_request.py
|
neutron/extensions/port_resource_request.py
|
# Copyright (c) 2018 Ericsson
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import port_resource_request
from neutron_lib.api import extensions as api_extensions
class Port_resource_request(api_extensions.APIExtensionDescriptor):
    api_definition = port_resource_request
|
Introduce Port resource request extension
|
Introduce Port resource request extension
The resources needed by a port are expressed to nova via
resource_request extending the port.
If the port requested by nova boot has resource_request attribute, that
means that placement must enforce the minimum bandwidth requirements.
Change-Id: I82afa56fa784fc43fefd1ce494bff649f51ad1c8
Depends-On: https://review.openstack.org/584903
Partial-Bug: #1578989
See-Also: https://review.openstack.org/502306 (nova spec)
See-Also: https://review.openstack.org/508149 (neutron spec)
|
Python
|
apache-2.0
|
mahak/neutron,noironetworks/neutron,mahak/neutron,openstack/neutron,openstack/neutron,mahak/neutron,noironetworks/neutron,openstack/neutron
|
|
fcde68e954eab9f1b158928f9d30633523d41d94
|
corehq/apps/userreports/management/commands/resave_couch_forms_and_cases.py
|
corehq/apps/userreports/management/commands/resave_couch_forms_and_cases.py
|
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import csv
import datetime

from django.core.management.base import BaseCommand

from corehq.util.couch import IterDB
from corehq.util.log import with_progress_bar
from couchforms.models import XFormInstance
from dimagi.utils.couch.database import iter_docs


class Command(BaseCommand):
    help = ("Save a bunch of couch documents so they are re-sent to kafka. "
            "Pass in a file with one doc id per line")

    def add_arguments(self, parser):
        parser.add_argument('ids_file')

    def handle(self, ids_file, **options):
        with open(ids_file) as f:
            doc_ids = [line.strip() for line in f]

        db = XFormInstance.get_db()  # Both forms and cases are in here
        with IterDB(db) as iter_db:
            for doc in iter_docs(db, with_progress_bar(doc_ids)):
                iter_db.save(doc)

        print("{} docs saved".format(len(iter_db.saved_ids)))
        print("{} docs errored".format(len(iter_db.error_ids)))
        not_found = len(doc_ids) - len(iter_db.saved_ids) - len(iter_db.error_ids)
        print("{} docs not found".format(not_found))

        filename = '{}_{}.csv'.format(ids_file, datetime.datetime.now())
        with open(filename, 'w') as f:
            writer = csv.writer(f)
            writer.writerow(['doc_id', 'status'])
            for doc_id in doc_ids:
                if doc_id in iter_db.saved_ids:
                    status = "saved"
                elif doc_id in iter_db.error_ids:
                    status = "errored"
                else:
                    status = "not_found"
                writer.writerow([doc_id, status])
        print("Saved results to {}".format(filename))
|
Add mgmt cmd to re-save a list of form/case IDs
|
Add mgmt cmd to re-save a list of form/case IDs
https://manage.dimagi.com/default.asp?263644
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
|
dc1118525e47854d8185d4d46db5a12ec3e93aea
|
examples/post_body_example.py
|
examples/post_body_example.py
|
'''A simple post reading server example.

To test, run this server with `hug -f post_body_example`
then run the following from ipython:

    import requests
    requests.post('http://localhost:8000/post_here', json={'one': 'two'}).json()

This should return back the json data that you posted
'''
import hug


@hug.post()
def post_here(body):
    '''This example shows how to read in post data w/ hug outside of its automatic param parsing'''
    return body
|
Add example post body processer
|
Add example post body processer
|
Python
|
mit
|
MuhammadAlkarouri/hug,timothycrosley/hug,timothycrosley/hug,MuhammadAlkarouri/hug,timothycrosley/hug,MuhammadAlkarouri/hug
|
|
e30ce1adf4e9c8504ae1816f0078c744e63c836c
|
scripts/save-vimt.py
|
scripts/save-vimt.py
|
import sys
sys.path.append('/home/jwalker/dynamics/python/atmos-tools')
sys.path.append('/home/jwalker/dynamics/python/atmos-read')

import numpy as np
import xray
import pandas as pd
import matplotlib.pyplot as plt

import atmos as atm
import merra

# ----------------------------------------------------------------------
# Daily vertically integrated moisture transport cf. Fasullo and Webster 2003


def savefile(year, mon):
    savedir = atm.homedir() + 'datastore/merra/daily/'
    filn = savedir + 'merra_vimt_%d%02d.nc' % (year, mon)
    print('Saving to ' + filn)
    return filn

lon1, lon2 = 40, 100
lat1, lat2 = -20, 30
varlist = ['u', 'v', 'q']
pmin = 300e2
years = range(1979, 2015)
months = [5, 6, 7, 8]

for year in years:
    for mon in months:
        dayvals = atm.season_days(atm.month_str(mon), atm.isleap(year))
        ds = merra.read_daily(varlist, year, mon, subset1=('lon', lon1, lon2),
                              subset2=('lat', lat1, lat2))
        uq = ds['U'] * ds['QV']
        vq = ds['V'] * ds['QV']

        # Daily means of 3-hourly data
        nperday = 8
        uq = atm.daily_from_subdaily(uq, nperday, dayvals=dayvals)
        vq = atm.daily_from_subdaily(vq, nperday, dayvals=dayvals)

        # Vertical integral
        uq_int = atm.int_pres(uq, pmin=pmin)
        vq_int = atm.int_pres(vq, pmin=pmin)
        uq_int.name = 'uq_int'
        uq_int.attrs['pmin'] = pmin
        vq_int.name = 'vq_int'
        vq_int.attrs['pmin'] = pmin

        # Save to file
        atm.save_nc(savefile(year, mon), uq_int, vq_int)
|
Save daily vertically integrated moisture transport for HOWI index
|
Save daily vertically integrated moisture transport for HOWI index
|
Python
|
mit
|
jenfly/monsoon-onset,jenfly/monsoon-onset
|
|
d708b64dd45b824bada255054df3988efa0f21fe
|
migrations/versions/0361_new_user_bcast_permissions.py
|
migrations/versions/0361_new_user_bcast_permissions.py
|
"""
Revision ID: 0361_new_user_bcast_permissions
Revises: 0360_remove_sched_notifications
Create Date: 2021-06-30 11:42:32.780734
"""
from alembic import op
revision = '0361_new_user_bcast_permissions'
down_revision = '0360_remove_sched_notifications'
def upgrade():
"""
Delete all permissions for broadcast service users and invited pending users, apart from 'view_activity'
which they always have.
"""
op.execute(
"DELETE FROM permissions WHERE permission != 'view_activity' "
"and service_id in (select id from services where organisation_id = '38e4bf69-93b0-445d-acee-53ea53fe02df')"
)
op.execute(
"UPDATE invited_users SET permissions = 'view_activity' WHERE status = 'pending' "
"and service_id in (select id from services where organisation_id = '38e4bf69-93b0-445d-acee-53ea53fe02df')"
)
def downgrade():
"""
This change cannot be downgraded since we no longer have access to the original permissions users had.
"""
|
Remove original permissions from broadcast users
|
Remove original permissions from broadcast users
The broadcast user permissions are changing, so to avoid confusion and
permissions which exist in the database but don't display on the
frontend we are going to remove all existing permissions for users of
broadcast services. This also updates the permissions of invited users
who are still pending.
The exception to this is the `view_activity` permission, which we always
add for broadcast users even if they have no other permissions.
(https://github.com/alphagov/notifications-admin/blob/aad017a184d314d9d192f4b9d6485915f83217c1/app/main/forms.py#L1043)
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
|
6b9265c96adb8ba9fc7f2d04edcb02398566e1b9
|
pre_commit_hooks/check_json.py
|
pre_commit_hooks/check_json.py
|
from __future__ import print_function

import argparse
import sys

import simplejson


def check_json(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*', help='JSON filenames to check.')
    args = parser.parse_args(argv)

    retval = 0

    for filename in args.filenames:
        try:
            simplejson.load(open(filename))
        except (simplejson.JSONDecodeError, UnicodeDecodeError) as exc:
            print('{0}: Failed to json encode ({1})'.format(filename, exc))
            retval = 1

    return retval


if __name__ == '__main__':
    sys.exit(check_json())
|
from __future__ import print_function

import argparse
import sys

import simplejson


def check_json(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('filenames', nargs='*', help='JSON filenames to check.')
    args = parser.parse_args(argv)

    retval = 0

    for filename in args.filenames:
        try:
            simplejson.load(open(filename))
        except (simplejson.JSONDecodeError, UnicodeDecodeError) as exc:
            print('{0}: Failed to json decode ({1})'.format(filename, exc))
            retval = 1

    return retval


if __name__ == '__main__':
    sys.exit(check_json())
|
Fix JSON decode error message
|
Fix JSON decode error message
|
Python
|
mit
|
Harwood/pre-commit-hooks,pre-commit/pre-commit-hooks
|
|
84633246eebe20504a8660f1cf3f62c7866a2088
|
scripts/set-artist-streamable.py
|
scripts/set-artist-streamable.py
|
#!/usr/bin/env python
import psycopg2 as ordbms
import urllib, urllib2
import xml.etree.cElementTree as ElementTree


class SetArtistStreamable:
    def __init__(self):
        self.conn = ordbms.connect("dbname='librefm'")
        self.cursor = self.conn.cursor()

    def updateAll(self):
        """Sets artists streamable property if they have streamable tracks already in the database"""
        self.cursor.execute("SELECT DISTINCT(artist.name) FROM artist INNER JOIN track on artist.name=artist_name WHERE track.streamable = 1")
        for artist in self.cursor.fetchall():
            name = artist[0]
            print "marking %s as streamable... " % name
            self.cursor.execute("UPDATE artist SET streamable = 1 WHERE name = %s", (name,))
        print "Applying changes... ",
        self.conn.commit()
        print "done."


if __name__ == '__main__':
    sas = SetArtistStreamable()
    sas.updateAll()
|
Add script for updating previously imported artists with the streamable property (so we don't have to query tracks to find out who's streamable)
|
Add script for updating previously imported artists with the streamable property (so we don't have to query tracks to find out who's streamable)
|
Python
|
agpl-3.0
|
foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm
|
|
32ea27cfa3994984d8d8f8db09522f6c31b0524f
|
every_election/apps/organisations/migrations/0059_add_sennedd_to_org_types.py
|
every_election/apps/organisations/migrations/0059_add_sennedd_to_org_types.py
|
# Generated by Django 2.2.16 on 2020-12-18 09:12

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ("organisations", "0058_remove_division_organisation_fk"),
    ]

    operations = [
        migrations.AlterField(
            model_name="organisation",
            name="organisation_type",
            field=models.CharField(
                choices=[
                    ("combined-authority", "combined-authority"),
                    ("sp", "sp"),
                    ("gla", "gla"),
                    ("local-authority", "local-authority"),
                    ("naw", "naw"),
                    ("senedd", "senedd"),
                    ("nia", "nia"),
                    ("parl", "parl"),
                    ("police-area", "police-area"),
                    ("sp", "sp"),
                    ("europarl", "europarl"),
                ],
                default="local-authority",
                max_length=255,
            ),
        ),
    ]
|
Add 'senedd' to choices for organisation_type
|
Add 'senedd' to choices for organisation_type
|
Python
|
bsd-3-clause
|
DemocracyClub/EveryElection,DemocracyClub/EveryElection,DemocracyClub/EveryElection
|
|
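Worth noting: the choices list in this migration carries ("sp", "sp") twice. Django accepts duplicate choices silently, so a quick check like the sketch below (illustrative only, not part of the migration) can flag them before a migration ships:

choices = [
    ("combined-authority", "combined-authority"),
    ("sp", "sp"),
    ("gla", "gla"),
    ("local-authority", "local-authority"),
    ("naw", "naw"),
    ("senedd", "senedd"),
    ("nia", "nia"),
    ("parl", "parl"),
    ("police-area", "police-area"),
    ("sp", "sp"),
    ("europarl", "europarl"),
]
keys = [key for key, _label in choices]
duplicates = sorted({key for key in keys if keys.count(key) > 1})
print("duplicate choice keys:", duplicates)  # -> ['sp'] for the list above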
5d749e09eeef87ad91f2255ab80305a26dd85989
|
html_sanitizer/__main__.py
|
html_sanitizer/__main__.py
|
from __future__ import unicode_literals
import io
import sys
from .sanitizer import Sanitizer
sanitizer = Sanitizer()
if len(sys.argv) > 1:
for filename in sys.argv[1:]:
with io.open(filename) as f:
print(sanitizer.sanitize(f.read()))
else:
print(sanitizer.sanitize(sys.stdin.read()).encode('utf-8'))
|
Allow running with "python -m html_sanitizer"
|
Allow running with "python -m html_sanitizer"
|
Python
|
bsd-3-clause
|
matthiask/html-sanitizer
|
Allow running with "python -m html_sanitizer"
|
from __future__ import unicode_literals
import io
import sys
from .sanitizer import Sanitizer
sanitizer = Sanitizer()
if len(sys.argv) > 1:
for filename in sys.argv[1:]:
with io.open(filename) as f:
print(sanitizer.sanitize(f.read()))
else:
print(sanitizer.sanitize(sys.stdin.read()).encode('utf-8'))
|
<commit_before><commit_msg>Allow running with "python -m html_sanitizer"<commit_after>
|
from __future__ import unicode_literals
import io
import sys
from .sanitizer import Sanitizer
sanitizer = Sanitizer()
if len(sys.argv) > 1:
for filename in sys.argv[1:]:
with io.open(filename) as f:
print(sanitizer.sanitize(f.read()))
else:
print(sanitizer.sanitize(sys.stdin.read()).encode('utf-8'))
|
Allow running with "python -m html_sanitizer"from __future__ import unicode_literals
import io
import sys
from .sanitizer import Sanitizer
sanitizer = Sanitizer()
if len(sys.argv) > 1:
for filename in sys.argv[1:]:
with io.open(filename) as f:
print(sanitizer.sanitize(f.read()))
else:
print(sanitizer.sanitize(sys.stdin.read()).encode('utf-8'))
|
<commit_before><commit_msg>Allow running with "python -m html_sanitizer"<commit_after>from __future__ import unicode_literals
import io
import sys
from .sanitizer import Sanitizer
sanitizer = Sanitizer()
if len(sys.argv) > 1:
for filename in sys.argv[1:]:
with io.open(filename) as f:
print(sanitizer.sanitize(f.read()))
else:
print(sanitizer.sanitize(sys.stdin.read()).encode('utf-8'))
|
|
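For completeness, the module above can also be exercised in-process rather than via stdin or filenames. A hedged sketch using the package's Sanitizer with its default settings (the sample HTML is illustrative); note, as an observation only, that the stdin branch above prints a bytes repr under Python 3 because of the .encode('utf-8') call, while the file branch prints plain text:

from html_sanitizer.sanitizer import Sanitizer

sanitizer = Sanitizer()  # default rule set
dirty = '<p onclick="evil()">hello <script>alert(1)</script> world</p>'
print(sanitizer.sanitize(dirty))  # script tag and event handler stripped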
76ced10ebc0c7ef36924ca4e123ba637a53e0784
|
test_server.py
|
test_server.py
|
import datetime
from flask.ext.testing import TestCase
import mongomock
import mock
from server import app, get_db
class BaseTest(TestCase):
def create_app(self):
app.config['TESTING'] = True
return app
def setUp(self):
self.mongo_patcher = mock.patch('pymongo.MongoClient', mongomock.MongoClient)
self.mongo_patcher.start()
def tearDown(self):
self.mongo_patcher.stop()
class TestGeoNotesApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
NOTE2 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get_empty(self):
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(resp.json, [])
def test_get(self):
_id = get_db().insert(self.NOTE1)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 1)
self.assertEqual(resp.json[0]['id'], str(_id))
def test_get_several(self):
_id1 = get_db().insert(self.NOTE1)
_id2 = get_db().insert(self.NOTE2)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 2)
def test_post_one_error(self):
resp = self.client.post("/geonotes", {})
self.assert400(resp)
resp = self.client.post("/geonotes", {'lat': 9})
self.assert400(resp)
def test_post_one(self):
to_post = {
'text_content': 'test',
'lat': 5,
'lng': 9,
'date': datetime.datetime(2013, 11, 10, 0, 0).isoformat()
}
resp = self.client.post("/geonotes", data=to_post)
self.assertStatus(resp, 201)
self.assertTrue('id' in resp.json)
class TestGeoNoteApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get(self):
id_ = get_db().insert(self.NOTE1)
resp = self.client.get('/geonotes/%s' % id_)
self.assert200(resp)
self.assertEqual(resp.json['id'], str(id_))
|
Add tests for RESTful server
|
Add tests for RESTful server
|
Python
|
bsd-2-clause
|
AAzza/geonaut,AAzza/geonaut
|
Add tests for RESTful server
|
import datetime
from flask.ext.testing import TestCase
import mongomock
import mock
from server import app, get_db
class BaseTest(TestCase):
def create_app(self):
app.config['TESTING'] = True
return app
def setUp(self):
self.mongo_patcher = mock.patch('pymongo.MongoClient', mongomock.MongoClient)
self.mongo_patcher.start()
def tearDown(self):
self.mongo_patcher.stop()
class TestGeoNotesApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
NOTE2 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get_empty(self):
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(resp.json, [])
def test_get(self):
_id = get_db().insert(self.NOTE1)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 1)
self.assertEqual(resp.json[0]['id'], str(_id))
def test_get_several(self):
_id1 = get_db().insert(self.NOTE1)
_id2 = get_db().insert(self.NOTE2)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 2)
def test_post_one_error(self):
resp = self.client.post("/geonotes", {})
self.assert400(resp)
resp = self.client.post("/geonotes", {'lat': 9})
self.assert400(resp)
def test_post_one(self):
to_post = {
'text_content': 'test',
'lat': 5,
'lng': 9,
'date': datetime.datetime(2013, 11, 10, 0, 0).isoformat()
}
resp = self.client.post("/geonotes", data=to_post)
self.assertStatus(resp, 201)
self.assertTrue('id' in resp.json)
class TestGeoNoteApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get(self):
id_ = get_db().insert(self.NOTE1)
resp = self.client.get('/geonotes/%s' % id_)
self.assert200(resp)
self.assertEqual(resp.json['id'], str(id_))
|
<commit_before><commit_msg>Add tests for RESTful server<commit_after>
|
import datetime
from flask.ext.testing import TestCase
import mongomock
import mock
from server import app, get_db
class BaseTest(TestCase):
def create_app(self):
app.config['TESTING'] = True
return app
def setUp(self):
self.mongo_patcher = mock.patch('pymongo.MongoClient', mongomock.MongoClient)
self.mongo_patcher.start()
def tearDown(self):
self.mongo_patcher.stop()
class TestGeoNotesApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
NOTE2 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get_empty(self):
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(resp.json, [])
def test_get(self):
_id = get_db().insert(self.NOTE1)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 1)
self.assertEqual(resp.json[0]['id'], str(_id))
def test_get_several(self):
_id1 = get_db().insert(self.NOTE1)
_id2 = get_db().insert(self.NOTE2)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 2)
def test_post_one_error(self):
resp = self.client.post("/geonotes", {})
self.assert400(resp)
resp = self.client.post("/geonotes", {'lat': 9})
self.assert400(resp)
def test_post_one(self):
to_post = {
'text_content': 'test',
'lat': 5,
'lng': 9,
'date': datetime.datetime(2013, 11, 10, 0, 0).isoformat()
}
resp = self.client.post("/geonotes", data=to_post)
self.assertStatus(resp, 201)
self.assertTrue('id' in resp.json)
class TestGeoNoteApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get(self):
id_ = get_db().insert(self.NOTE1)
resp = self.client.get('/geonotes/%s' % id_)
self.assert200(resp)
self.assertEqual(resp.json['id'], str(id_))
|
Add tests for RESTful serverimport datetime
from flask.ext.testing import TestCase
import mongomock
import mock
from server import app, get_db
class BaseTest(TestCase):
def create_app(self):
app.config['TESTING'] = True
return app
def setUp(self):
self.mongo_patcher = mock.patch('pymongo.MongoClient', mongomock.MongoClient)
self.mongo_patcher.start()
def tearDown(self):
self.mongo_patcher.stop()
class TestGeoNotesApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
NOTE2 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get_empty(self):
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(resp.json, [])
def test_get(self):
_id = get_db().insert(self.NOTE1)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 1)
self.assertEqual(resp.json[0]['id'], str(_id))
def test_get_several(self):
_id1 = get_db().insert(self.NOTE1)
_id2 = get_db().insert(self.NOTE2)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 2)
def test_post_one_error(self):
resp = self.client.post("/geonotes", {})
self.assert400(resp)
resp = self.client.post("/geonotes", {'lat': 9})
self.assert400(resp)
def test_post_one(self):
to_post = {
'text_content': 'test',
'lat': 5,
'lng': 9,
'date': datetime.datetime(2013, 11, 10, 0, 0).isoformat()
}
resp = self.client.post("/geonotes", data=to_post)
self.assertStatus(resp, 201)
self.assertTrue('id' in resp.json)
class TestGeoNoteApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get(self):
id_ = get_db().insert(self.NOTE1)
resp = self.client.get('/geonotes/%s' % id_)
self.assert200(resp)
self.assertEqual(resp.json['id'], str(id_))
|
<commit_before><commit_msg>Add tests for RESTful server<commit_after>import datetime
from flask.ext.testing import TestCase
import mongomock
import mock
from server import app, get_db
class BaseTest(TestCase):
def create_app(self):
app.config['TESTING'] = True
return app
def setUp(self):
self.mongo_patcher = mock.patch('pymongo.MongoClient', mongomock.MongoClient)
self.mongo_patcher.start()
def tearDown(self):
self.mongo_patcher.stop()
class TestGeoNotesApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
NOTE2 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get_empty(self):
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(resp.json, [])
def test_get(self):
_id = get_db().insert(self.NOTE1)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 1)
self.assertEqual(resp.json[0]['id'], str(_id))
def test_get_several(self):
_id1 = get_db().insert(self.NOTE1)
_id2 = get_db().insert(self.NOTE2)
resp = self.client.get("/geonotes")
self.assert200(resp)
self.assertEquals(len(resp.json), 2)
def test_post_one_error(self):
resp = self.client.post("/geonotes", {})
self.assert400(resp)
resp = self.client.post("/geonotes", {'lat': 9})
self.assert400(resp)
def test_post_one(self):
to_post = {
'text_content': 'test',
'lat': 5,
'lng': 9,
'date': datetime.datetime(2013, 11, 10, 0, 0).isoformat()
}
resp = self.client.post("/geonotes", data=to_post)
self.assertStatus(resp, 201)
self.assertTrue('id' in resp.json)
class TestGeoNoteApi(BaseTest):
NOTE1 = {
'txt': "test",
'lat': 0,
'lng': 0,
'dt': datetime.datetime(2012, 11, 10, 0, 0),
}
def test_get(self):
id_ = get_db().insert(self.NOTE1)
resp = self.client.get('/geonotes/%s' % id_)
self.assert200(resp)
self.assertEqual(resp.json['id'], str(id_))
|
|
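The mongomock patch in setUp swaps pymongo's client for an in-memory fake for each test's lifetime; below is a standalone sketch of the same pattern (the collection and document names are illustrative). One caveat in the tests above, noted rather than changed: Flask's test client takes the request body via the data= keyword, so calls like self.client.post("/geonotes", {}) pass the dict positionally instead of as form data.

import mock
import mongomock

patcher = mock.patch('pymongo.MongoClient', mongomock.MongoClient)
patcher.start()
try:
    import pymongo
    db = pymongo.MongoClient()['testdb']          # actually a mongomock client
    _id = db['geonotes'].insert_one({'txt': 'test'}).inserted_id
    print(db['geonotes'].find_one({'_id': _id}))  # served from memory, no server
finally:
    patcher.stop()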
fcf6fe09abcb448ec6071f5a92b1d44621fb2a49
|
st2client/tests/unit/test_client_actions.py
|
st2client/tests/unit/test_client_actions.py
|
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
import logging
import mock
import unittest2
from tests import base
from st2client import client
from st2client.utils import httpclient
LOG = logging.getLogger(__name__)
EXECUTION = {
"id": 12345,
"action": {
"ref": "mock.foobar"
},
"status": "failed",
"result": "non-empty"
}
ENTRYPOINT = (
"version: 1.0"
"description: A basic workflow that runs an arbitrary linux command."
"input:"
" - cmd"
" - timeout"
"tasks:"
" task1:"
" action: core.local cmd=<% ctx(cmd) %> timeout=<% ctx(timeout) %>"
" next:"
" - when: <% succeeded() %>"
" publish:"
" - stdout: <% result().stdout %>"
" - stderr: <% result().stderr %>"
"output:"
" - stdout: <% ctx(stdout) %>"
)
class TestActionResourceManager(unittest2.TestCase):
@classmethod
def setUpClass(cls):
super(TestActionResourceManager, cls).setUpClass()
cls.client = client.Client()
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_ref(self):
self.client.actions.get_entrypoint(EXECUTION['action']['ref'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['action']['ref']
httpclient.HTTPClient.get.assert_called_with(endpoint)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_id(self):
self.client.actions.get_entrypoint(EXECUTION['id'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['id']
httpclient.HTTPClient.get.assert_called_with(endpoint)
|
Add unit tests for the new ActionResourceManager methods
|
Add unit tests for the new ActionResourceManager methods
|
Python
|
apache-2.0
|
nzlosh/st2,nzlosh/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,Plexxi/st2,StackStorm/st2,Plexxi/st2,StackStorm/st2,Plexxi/st2
|
Add unit tests for the new ActionResourceManager methods
|
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
import logging
import mock
import unittest2
from tests import base
from st2client import client
from st2client.utils import httpclient
LOG = logging.getLogger(__name__)
EXECUTION = {
"id": 12345,
"action": {
"ref": "mock.foobar"
},
"status": "failed",
"result": "non-empty"
}
ENTRYPOINT = (
"version: 1.0"
"description: A basic workflow that runs an arbitrary linux command."
"input:"
" - cmd"
" - timeout"
"tasks:"
" task1:"
" action: core.local cmd=<% ctx(cmd) %> timeout=<% ctx(timeout) %>"
" next:"
" - when: <% succeeded() %>"
" publish:"
" - stdout: <% result().stdout %>"
" - stderr: <% result().stderr %>"
"output:"
" - stdout: <% ctx(stdout) %>"
)
class TestActionResourceManager(unittest2.TestCase):
@classmethod
def setUpClass(cls):
super(TestActionResourceManager, cls).setUpClass()
cls.client = client.Client()
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_ref(self):
self.client.actions.get_entrypoint(EXECUTION['action']['ref'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['action']['ref']
httpclient.HTTPClient.get.assert_called_with(endpoint)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_id(self):
self.client.actions.get_entrypoint(EXECUTION['id'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['id']
httpclient.HTTPClient.get.assert_called_with(endpoint)
|
<commit_before><commit_msg>Add unit tests for the new ActionResourceManager methods<commit_after>
|
# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
import logging
import mock
import unittest2
from tests import base
from st2client import client
from st2client.utils import httpclient
LOG = logging.getLogger(__name__)
EXECUTION = {
"id": 12345,
"action": {
"ref": "mock.foobar"
},
"status": "failed",
"result": "non-empty"
}
ENTRYPOINT = (
"version: 1.0"
"description: A basic workflow that runs an arbitrary linux command."
"input:"
" - cmd"
" - timeout"
"tasks:"
" task1:"
" action: core.local cmd=<% ctx(cmd) %> timeout=<% ctx(timeout) %>"
" next:"
" - when: <% succeeded() %>"
" publish:"
" - stdout: <% result().stdout %>"
" - stderr: <% result().stderr %>"
"output:"
" - stdout: <% ctx(stdout) %>"
)
class TestActionResourceManager(unittest2.TestCase):
@classmethod
def setUpClass(cls):
super(TestActionResourceManager, cls).setUpClass()
cls.client = client.Client()
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_ref(self):
self.client.actions.get_entrypoint(EXECUTION['action']['ref'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['action']['ref']
httpclient.HTTPClient.get.assert_called_with(endpoint)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_id(self):
self.client.actions.get_entrypoint(EXECUTION['id'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['id']
httpclient.HTTPClient.get.assert_called_with(endpoint)
|
Add unit tests for the new ActionResourceManager methods# Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
import logging
import mock
import unittest2
from tests import base
from st2client import client
from st2client.utils import httpclient
LOG = logging.getLogger(__name__)
EXECUTION = {
"id": 12345,
"action": {
"ref": "mock.foobar"
},
"status": "failed",
"result": "non-empty"
}
ENTRYPOINT = (
"version: 1.0"
"description: A basic workflow that runs an arbitrary linux command."
"input:"
" - cmd"
" - timeout"
"tasks:"
" task1:"
" action: core.local cmd=<% ctx(cmd) %> timeout=<% ctx(timeout) %>"
" next:"
" - when: <% succeeded() %>"
" publish:"
" - stdout: <% result().stdout %>"
" - stderr: <% result().stderr %>"
"output:"
" - stdout: <% ctx(stdout) %>"
)
class TestActionResourceManager(unittest2.TestCase):
@classmethod
def setUpClass(cls):
super(TestActionResourceManager, cls).setUpClass()
cls.client = client.Client()
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_ref(self):
self.client.actions.get_entrypoint(EXECUTION['action']['ref'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['action']['ref']
httpclient.HTTPClient.get.assert_called_with(endpoint)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_id(self):
self.client.actions.get_entrypoint(EXECUTION['id'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['id']
httpclient.HTTPClient.get.assert_called_with(endpoint)
|
<commit_before><commit_msg>Add unit tests for the new ActionResourceManager methods<commit_after># Copyright 2019 Extreme Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
import json
import logging
import mock
import unittest2
from tests import base
from st2client import client
from st2client.utils import httpclient
LOG = logging.getLogger(__name__)
EXECUTION = {
"id": 12345,
"action": {
"ref": "mock.foobar"
},
"status": "failed",
"result": "non-empty"
}
ENTRYPOINT = (
"version: 1.0"
"description: A basic workflow that runs an arbitrary linux command."
"input:"
" - cmd"
" - timeout"
"tasks:"
" task1:"
" action: core.local cmd=<% ctx(cmd) %> timeout=<% ctx(timeout) %>"
" next:"
" - when: <% succeeded() %>"
" publish:"
" - stdout: <% result().stdout %>"
" - stderr: <% result().stderr %>"
"output:"
" - stdout: <% ctx(stdout) %>"
)
class TestActionResourceManager(unittest2.TestCase):
@classmethod
def setUpClass(cls):
super(TestActionResourceManager, cls).setUpClass()
cls.client = client.Client()
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_ref(self):
self.client.actions.get_entrypoint(EXECUTION['action']['ref'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['action']['ref']
httpclient.HTTPClient.get.assert_called_with(endpoint)
@mock.patch.object(
httpclient.HTTPClient, 'get',
mock.MagicMock(return_value=base.FakeResponse(json.dumps(ENTRYPOINT), 200, 'OK')))
def test_get_action_entry_point_by_id(self):
self.client.actions.get_entrypoint(EXECUTION['id'])
endpoint = '/actions/views/entry_point/%s' % EXECUTION['id']
httpclient.HTTPClient.get.assert_called_with(endpoint)
|
|
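One thing the ENTRYPOINT constant above hides: adjacent string literals inside parentheses concatenate with no newlines, so the value is a single run-on line rather than the multi-line YAML it resembles. The tests only round-trip it through json.dumps, so they still pass; if valid YAML were ever needed, a triple-quoted literal keeps the line breaks. A sketch (workflow abbreviated, same idea):

ENTRYPOINT = """\
version: 1.0
description: A basic workflow that runs an arbitrary linux command.
input:
  - cmd
  - timeout
tasks:
  task1:
    action: core.local cmd=<% ctx(cmd) %> timeout=<% ctx(timeout) %>
output:
  - stdout: <% ctx(stdout) %>
"""
assert "\n" in ENTRYPOINT  # the parenthesised form above contains no newlines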
fa9b27bd8e58b17a2d072fdf2b173f14d5f15a33
|
tests/functional/test_response_shadowing.py
|
tests/functional/test_response_shadowing.py
|
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.session import Session
from nose.tools import assert_false
def _all_services():
session = Session()
service_names = session.get_available_services()
for service_name in service_names:
yield session.get_service_model(service_name)
def _all_operations():
for service_model in _all_services():
for operation_name in service_model.operation_names:
yield service_model.operation_model(operation_name)
def _assert_not_shadowed(key, shape):
if not shape:
return
msg = (
'Found shape "%s" that shadows the botocore response key "%s"'
)
assert_false(key in shape.members, msg % (shape.name, key))
def test_response_metadata_is_not_shadowed():
for operation_model in _all_operations():
shape = operation_model.output_shape
yield _assert_not_shadowed, 'ResponseMetadata', shape
def test_exceptions_do_not_shadow():
for service_model in _all_services():
for shape in service_model.error_shapes:
yield _assert_not_shadowed, 'ResponseMetadata', shape
yield _assert_not_shadowed, 'Error', shape
|
Add functional test to ensure response keys are not shadowed
|
Add functional test to ensure response keys are not shadowed
|
Python
|
apache-2.0
|
boto/botocore,pplu/botocore
|
Add functional test to ensure response keys are not shadowed
|
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.session import Session
from nose.tools import assert_false
def _all_services():
session = Session()
service_names = session.get_available_services()
for service_name in service_names:
yield session.get_service_model(service_name)
def _all_operations():
for service_model in _all_services():
for operation_name in service_model.operation_names:
yield service_model.operation_model(operation_name)
def _assert_not_shadowed(key, shape):
if not shape:
return
msg = (
'Found shape "%s" that shadows the botocore response key "%s"'
)
assert_false(key in shape.members, msg % (shape.name, key))
def test_response_metadata_is_not_shadowed():
for operation_model in _all_operations():
shape = operation_model.output_shape
yield _assert_not_shadowed, 'ResponseMetadata', shape
def test_exceptions_do_not_shadow():
for service_model in _all_services():
for shape in service_model.error_shapes:
yield _assert_not_shadowed, 'ResponseMetadata', shape
yield _assert_not_shadowed, 'Error', shape
|
<commit_before><commit_msg>Add functional test to ensure response keys are not shadowed<commit_after>
|
# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.session import Session
from nose.tools import assert_false
def _all_services():
session = Session()
service_names = session.get_available_services()
for service_name in service_names:
yield session.get_service_model(service_name)
def _all_operations():
for service_model in _all_services():
for operation_name in service_model.operation_names:
yield service_model.operation_model(operation_name)
def _assert_not_shadowed(key, shape):
if not shape:
return
msg = (
'Found shape "%s" that shadows the botocore response key "%s"'
)
assert_false(key in shape.members, msg % (shape.name, key))
def test_response_metadata_is_not_shadowed():
for operation_model in _all_operations():
shape = operation_model.output_shape
yield _assert_not_shadowed, 'ResponseMetadata', shape
def test_exceptions_do_not_shadow():
for service_model in _all_services():
for shape in service_model.error_shapes:
yield _assert_not_shadowed, 'ResponseMetadata', shape
yield _assert_not_shadowed, 'Error', shape
|
Add functional test to ensure response keys are not shadowed# Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.session import Session
from nose.tools import assert_false
def _all_services():
session = Session()
service_names = session.get_available_services()
for service_name in service_names:
yield session.get_service_model(service_name)
def _all_operations():
for service_model in _all_services():
for operation_name in service_model.operation_names:
yield service_model.operation_model(operation_name)
def _assert_not_shadowed(key, shape):
if not shape:
return
msg = (
'Found shape "%s" that shadows the botocore response key "%s"'
)
assert_false(key in shape.members, msg % (shape.name, key))
def test_response_metadata_is_not_shadowed():
for operation_model in _all_operations():
shape = operation_model.output_shape
yield _assert_not_shadowed, 'ResponseMetadata', shape
def test_exceptions_do_not_shadow():
for service_model in _all_services():
for shape in service_model.error_shapes:
yield _assert_not_shadowed, 'ResponseMetadata', shape
yield _assert_not_shadowed, 'Error', shape
|
<commit_before><commit_msg>Add functional test to ensure response keys are not shadowed<commit_after># Copyright 2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from botocore.session import Session
from nose.tools import assert_false
def _all_services():
session = Session()
service_names = session.get_available_services()
for service_name in service_names:
yield session.get_service_model(service_name)
def _all_operations():
for service_model in _all_services():
for operation_name in service_model.operation_names:
yield service_model.operation_model(operation_name)
def _assert_not_shadowed(key, shape):
if not shape:
return
msg = (
'Found shape "%s" that shadows the botocore response key "%s"'
)
assert_false(key in shape.members, msg % (shape.name, key))
def test_response_metadata_is_not_shadowed():
for operation_model in _all_operations():
shape = operation_model.output_shape
yield _assert_not_shadowed, 'ResponseMetadata', shape
def test_exceptions_do_not_shadow():
for service_model in _all_services():
for shape in service_model.error_shapes:
yield _assert_not_shadowed, 'ResponseMetadata', shape
yield _assert_not_shadowed, 'Error', shape
|
|
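The yield-style generators above rely on nose's test generation, which pytest later removed; the same coverage translates to parametrize. A hedged sketch under that assumption (pytest only, shapes collected once at import time):

import pytest
from botocore.session import Session

def _output_shapes():
    session = Session()
    for name in session.get_available_services():
        model = session.get_service_model(name)
        for op_name in model.operation_names:
            yield model.operation_model(op_name).output_shape

@pytest.mark.parametrize("shape", list(_output_shapes()))
def test_response_metadata_is_not_shadowed(shape):
    if shape is None:
        return
    assert 'ResponseMetadata' not in shape.members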
ec34549e7cc0910565709540df4e2d4b81333c43
|
grizli/pipeline/run_MPI.py
|
grizli/pipeline/run_MPI.py
|
"""
Script to run all redshift fits in parallel with OpenMPI
Usage:
mpiexec -n 10 python -m mpi4py.futures $GRIZLICODE/grizli/pipeline/run_MPI.py
where "-n 8" indicates running 8 parallel threads.
Needs 'fit_args.py' created by `auto_script.generate_fit_params`.
"""
import os
import glob
import numpy as np
import drizzlepac
import matplotlib.pyplot as plt
plt.ioff()
from mpi4py.futures import MPIPoolExecutor
from grizli.fitting import run_all_parallel
from grizli import utils
utils.set_warnings()
def find_ids():
# Find objects with extracted spectra that still need to be fit
all_files=glob.glob('*beams.fits')
files = []
for file in all_files:
if not os.path.exists(file.replace('beams.fits', 'full.fits')):
files.append(file)
print('{0} files to fit'.format(len(files)))
ids = [int(file.split('_')[1].split('.')[0]) for file in files]
return ids
if __name__ == '__main__':
import time
t1 = time.time()
ids = find_ids()
if len(ids) == 0:
exit()
with MPIPoolExecutor() as executor:
res = executor.map(run_all_parallel, ids)
for ix in res:
print(' Done, id={0} / status={1}, t={2:.1f}'.format(ix[0], ix[1], ix[2]))
t2 = time.time()
print('MPIPool: {0:.1f}'.format(t2-t1))
|
Add file for running fits with OpenMPI
|
Add file for running fits with OpenMPI
|
Python
|
mit
|
gbrammer/grizli
|
Add file for running fits with OpenMPI
|
"""
Script to run all redshift fits in parallel with OpenMPI
Usage:
mpiexec -n 10 python -m mpi4py.futures $GRIZLICODE/grizli/pipeline/run_MPI.py
where "-n 8" indicates running 8 parallel threads.
Needs 'fit_args.py' created by `auto_script.generate_fit_params`.
"""
import os
import glob
import numpy as np
import drizzlepac
import matplotlib.pyplot as plt
plt.ioff()
from mpi4py.futures import MPIPoolExecutor
from grizli.fitting import run_all_parallel
from grizli import utils
utils.set_warnings()
def find_ids():
# Find objects with extracted spectra that still need to be fit
all_files=glob.glob('*beams.fits')
files = []
for file in all_files:
if not os.path.exists(file.replace('beams.fits', 'full.fits')):
files.append(file)
print('{0} files to fit'.format(len(files)))
ids = [int(file.split('_')[1].split('.')[0]) for file in files]
return ids
if __name__ == '__main__':
import time
t1 = time.time()
ids = find_ids()
if len(ids) == 0:
exit()
with MPIPoolExecutor() as executor:
res = executor.map(run_all_parallel, ids)
for ix in res:
print(' Done, id={0} / status={1}, t={2:.1f}'.format(ix[0], ix[1], ix[2]))
t2 = time.time()
print('MPIPool: {0:.1f}'.format(t2-t1))
|
<commit_before><commit_msg>Add file for running fits with OpenMPI<commit_after>
|
"""
Script to run all redshift fits in parallel with OpenMPI
Usage:
mpiexec -n 10 python -m mpi4py.futures $GRIZLICODE/grizli/pipeline/run_MPI.py
where "-n 8" indicates running 8 parallel threads.
Needs 'fit_args.py' created by `auto_script.generate_fit_params`.
"""
import os
import glob
import numpy as np
import drizzlepac
import matplotlib.pyplot as plt
plt.ioff()
from mpi4py.futures import MPIPoolExecutor
from grizli.fitting import run_all_parallel
from grizli import utils
utils.set_warnings()
def find_ids():
# Find objects with extracted spectra that still need to be fit
all_files=glob.glob('*beams.fits')
files = []
for file in all_files:
if not os.path.exists(file.replace('beams.fits', 'full.fits')):
files.append(file)
print('{0} files to fit'.format(len(files)))
ids = [int(file.split('_')[1].split('.')[0]) for file in files]
return ids
if __name__ == '__main__':
import time
t1 = time.time()
ids = find_ids()
if len(ids) == 0:
exit()
with MPIPoolExecutor() as executor:
res = executor.map(run_all_parallel, ids)
for ix in res:
print(' Done, id={0} / status={1}, t={2:.1f}'.format(ix[0], ix[1], ix[2]))
t2 = time.time()
print('MPIPool: {0:.1f}'.format(t2-t1))
|
Add file for running fits with OpenMPI"""
Script to run all redshift fits in parallel with OpenMPI
Usage:
mpiexec -n 10 python -m mpi4py.futures $GRIZLICODE/grizli/pipeline/run_MPI.py
where "-n 8" indicates running 8 parallel threads.
Needs 'fit_args.py' created by `auto_script.generate_fit_params`.
"""
import os
import glob
import numpy as np
import drizzlepac
import matplotlib.pyplot as plt
plt.ioff()
from mpi4py.futures import MPIPoolExecutor
from grizli.fitting import run_all_parallel
from grizli import utils
utils.set_warnings()
def find_ids():
# Find objects with extracted spectra that still need to be fit
all_files=glob.glob('*beams.fits')
files = []
for file in all_files:
if not os.path.exists(file.replace('beams.fits', 'full.fits')):
files.append(file)
print('{0} files to fit'.format(len(files)))
ids = [int(file.split('_')[1].split('.')[0]) for file in files]
return ids
if __name__ == '__main__':
import time
t1 = time.time()
ids = find_ids()
if len(ids) == 0:
exit()
with MPIPoolExecutor() as executor:
res = executor.map(run_all_parallel, ids)
for ix in res:
print(' Done, id={0} / status={1}, t={2:.1f}'.format(ix[0], ix[1], ix[2]))
t2 = time.time()
print('MPIPool: {0:.1f}'.format(t2-t1))
|
<commit_before><commit_msg>Add file for running fits with OpenMPI<commit_after>"""
Script to run all redshift fits in parallel with OpenMPI
Usage:
mpiexec -n 10 python -m mpi4py.futures $GRIZLICODE/grizli/pipeline/run_MPI.py
where "-n 8" indicates running 8 parallel threads.
Needs 'fit_args.py' created by `auto_script.generate_fit_params`.
"""
import os
import glob
import numpy as np
import drizzlepac
import matplotlib.pyplot as plt
plt.ioff()
from mpi4py.futures import MPIPoolExecutor
from grizli.fitting import run_all_parallel
from grizli import utils
utils.set_warnings()
def find_ids():
# Find objects with extracted spectra that still need to be fit
all_files=glob.glob('*beams.fits')
files = []
for file in all_files:
if not os.path.exists(file.replace('beams.fits', 'full.fits')):
files.append(file)
print('{0} files to fit'.format(len(files)))
ids = [int(file.split('_')[1].split('.')[0]) for file in files]
return ids
if __name__ == '__main__':
import time
t1 = time.time()
ids = find_ids()
if len(ids) == 0:
exit()
with MPIPoolExecutor() as executor:
res = executor.map(run_all_parallel, ids)
for ix in res:
print(' Done, id={0} / status={1}, t={2:.1f}'.format(ix[0], ix[1], ix[2]))
t2 = time.time()
print('MPIPool: {0:.1f}'.format(t2-t1))
|
|
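find_ids above stats the filesystem once per beams file; the same scan can also be expressed as a set difference over two globs. A sketch under the same naming assumption (a *_NNNNN.beams.fits file is paired with *_NNNNN.full.fits once fit):

import glob

def find_ids():
    beams = set(glob.glob('*beams.fits'))
    done = {f.replace('full.fits', 'beams.fits') for f in glob.glob('*full.fits')}
    todo = sorted(beams - done)
    print('{0} files to fit'.format(len(todo)))
    return [int(f.split('_')[1].split('.')[0]) for f in todo]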
fe3e93a0b74a987fa4545f75f3f6301fc15f571e
|
setuptools/tests/test_build.py
|
setuptools/tests/test_build.py
|
from setuptools.dist import Distribution
from setuptools.command.build import build
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
"""
Check that the setuptools Distribution uses the
setuptools specific build object.
"""
dist = Distribution(dict(
script_name='setup.py',
script_args=['build'],
packages=[''],
package_data={'': ['path/*']},
))
assert isinstance(dist.get_command_obj("build"), build)
|
Add a simple test for setuptools.command.build
|
Add a simple test for setuptools.command.build
|
Python
|
mit
|
pypa/setuptools,pypa/setuptools,pypa/setuptools
|
Add a simple test for setuptools.command.build
|
from setuptools.dist import Distribution
from setuptools.command.build import build
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
"""
Check that the setuptools Distribution uses the
setuptools specific build object.
"""
dist = Distribution(dict(
script_name='setup.py',
script_args=['build'],
packages=[''],
package_data={'': ['path/*']},
))
assert isinstance(dist.get_command_obj("build"), build)
|
<commit_before><commit_msg>Add a simple test for setuptools.command.build<commit_after>
|
from setuptools.dist import Distribution
from setuptools.command.build import build
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
"""
Check that the setuptools Distribution uses the
setuptools specific build object.
"""
dist = Distribution(dict(
script_name='setup.py',
script_args=['build'],
packages=[''],
package_data={'': ['path/*']},
))
assert isinstance(dist.get_command_obj("build"), build)
|
Add a simple test for setuptools.command.buildfrom setuptools.dist import Distribution
from setuptools.command.build import build
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
"""
Check that the setuptools Distribution uses the
setuptools specific build object.
"""
dist = Distribution(dict(
script_name='setup.py',
script_args=['build'],
packages=[''],
package_data={'': ['path/*']},
))
assert isinstance(dist.get_command_obj("build"), build)
|
<commit_before><commit_msg>Add a simple test for setuptools.command.build<commit_after>from setuptools.dist import Distribution
from setuptools.command.build import build
def test_distribution_gives_setuptools_build_obj(tmpdir_cwd):
"""
Check that the setuptools Distribution uses the
setuptools specific build object.
"""
dist = Distribution(dict(
script_name='setup.py',
script_args=['build'],
packages=[''],
package_data={'': ['path/*']},
))
assert isinstance(dist.get_command_obj("build"), build)
|
|
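For reference, the lazy instantiation the test relies on can be seen standalone: get_command_obj creates and caches the command object on first request. A sketch with the Distribution arguments trimmed down (no fixtures assumed):

from setuptools.dist import Distribution
from setuptools.command.build import build

dist = Distribution({'script_name': 'setup.py', 'script_args': ['build']})
cmd = dist.get_command_obj('build')  # created and cached on first access
print(isinstance(cmd, build))        # True when setuptools' build is wired in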
8525b50385a8b18333ad557ab66029d467c3e915
|
test/test_core.py
|
test/test_core.py
|
# -*- coding: utf-8 -*-
"""Testing CLI module
Only supports Python 3.x now"""
import unittest
# Resolve import problem
import os
os.chdir("../")
from vocabtool import core
class TestCoreConfig(unittest.TestCase):
"""Test case for the config components of core"""
def setUp(self):
pass
def tearDown(self):
pass
def test_load_config(self):
pass
def test_read_config(self):
pass
def test_write_config(self):
pass
class TestCoreLookUp(unittest.TestCase):
"""Test case for core's components related to looking up words"""
def setUp(self):
pass
def tearDown(self):
pass
def test_lookup_invalid_word(self):
pass
def test_lookup_invalid_language(self):
pass
def test_lookup_empty_source_list(self):
pass
class TestCoreAddToDatabase(unittest.TestCase):
"""Test case for core's components related to adding entries to database"""
def setUp(self):
pass
def tearDown(self):
pass
def test_add_to_database(self):
pass
class TestCoreGenerateLaTeX(unittest.TestCase):
"""Test case for core's components related to generate LaTeX output"""
def setUp(self):
pass
def tearDown(self):
pass
def test_generate(self):
pass
if __name__ == "__main__":
unittest.main()
|
Add preliminary test suite for core.py
|
Add preliminary test suite for core.py
|
Python
|
mit
|
RihanWu/vocabtool
|
Add preliminary test suite for core.py
|
# -*- coding: utf-8 -*-
"""Testing CLI module
Only supports Python 3.x now"""
import unittest
# Resolve import problem
import os
os.chdir("../")
from vocabtool import core
class TestCoreConfig(unittest.TestCase):
"""Test case for the config components of core"""
def setUp(self):
pass
def tearDown(self):
pass
def test_load_config(self):
pass
def test_read_config(self):
pass
def test_write_config(self):
pass
class TestCoreLookUp(unittest.TestCase):
"""Test case for core's components related to looking up words"""
def setUp(self):
pass
def tearDown(self):
pass
def test_lookup_invalid_word(self):
pass
def test_lookup_invalid_language(self):
pass
def test_lookup_empty_source_list(self):
pass
class TestCoreAddToDatabase(unittest.TestCase):
"""Test case for core's components related to adding entries to database"""
def setUp(self):
pass
def tearDown(self):
pass
def test_add_to_database(self):
pass
class TestCoreGenerateLaTeX(unittest.TestCase):
"""Test case for core's components related to generate LaTeX output"""
def setUp(self):
pass
def tearDown(self):
pass
def test_generate(self):
pass
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add preliminary test suite for core.py<commit_after>
|
# -*- coding: utf-8 -*-
"""Testing CLI module
Only supports Python 3.x now"""
import unittest
# Resolve import problem
import os
os.chdir("../")
from vocabtool import core
class TestCoreConfig(unittest.TestCase):
"""Test case for the config components of core"""
def setUp(self):
pass
def tearDown(self):
pass
def test_load_config(self):
pass
def test_read_config(self):
pass
def test_write_config(self):
pass
class TestCoreLookUp(unittest.TestCase):
"""Test case for core's components related to looking up words"""
def setUp(self):
pass
def tearDown(self):
pass
def test_lookup_invalid_word(self):
pass
def test_lookup_invalid_language(self):
pass
def test_lookup_empty_source_list(self):
pass
class TestCoreAddToDatabase(unittest.TestCase):
"""Test case for core's components related to adding entries to database"""
def setUp(self):
pass
def tearDown(self):
pass
def test_add_to_database(self):
pass
class TestCoreGenerateLaTeX(unittest.TestCase):
"""Test case for core's components related to generate LaTeX output"""
def setUp(self):
pass
def tearDown(self):
pass
def test_generate(self):
pass
if __name__ == "__main__":
unittest.main()
|
Add preliminary test suite for core.py# -*- coding: utf-8 -*-
"""Testing CLI module
Only supports Python 3.x now"""
import unittest
# Resolve import problem
import os
os.chdir("../")
from vocabtool import core
class TestCoreConfig(unittest.TestCase):
"""Test case for the config components of core"""
def setUp(self):
pass
def tearDown(self):
pass
def test_load_config(self):
pass
def test_read_config(self):
pass
def test_write_config(self):
pass
class TestCoreLookUp(unittest.TestCase):
"""Test case for core's components related to looking up words"""
def setUp(self):
pass
def tearDown(self):
pass
def test_lookup_invalid_word(self):
pass
def test_lookup_invalid_language(self):
pass
def test_lookup_empty_source_list(self):
pass
class TestCoreAddToDatabase(unittest.TestCase):
"""Test case for core's components related to adding entries to database"""
def setUp(self):
pass
def tearDown(self):
pass
def test_add_to_database(self):
pass
class TestCoreGenerateLaTeX(unittest.TestCase):
"""Test case for core's components related to generate LaTeX output"""
def setUp(self):
pass
def tearDown(self):
pass
def test_generate(self):
pass
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add preliminary test suite for core.py<commit_after># -*- coding: utf-8 -*-
"""Testing CLI module
Only supports Python 3.x now"""
import unittest
# Resolve import problem
import os
os.chdir("../")
from vocabtool import core
class TestCoreConfig(unittest.TestCase):
"""Test case for the config components of core"""
def setUp(self):
pass
def tearDown(self):
pass
def test_load_config(self):
pass
def test_read_config(self):
pass
def test_write_config(self):
pass
class TestCoreLookUp(unittest.TestCase):
"""Test case for core's components related to looking up words"""
def setUp(self):
pass
def tearDown(self):
pass
def test_lookup_invalid_word(self):
pass
def test_lookup_invalid_language(self):
pass
def test_lookup_empty_source_list(self):
pass
class TestCoreAddToDatabase(unittest.TestCase):
"""Test case for core's components related to adding entries to database"""
def setUp(self):
pass
def tearDown(self):
pass
def test_add_to_database(self):
pass
class TestCoreGenerateLaTeX(unittest.TestCase):
"""Test case for core's components related to generate LaTeX output"""
def setUp(self):
pass
def tearDown(self):
pass
def test_generate(self):
pass
if __name__ == "__main__":
unittest.main()
|
|
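The os.chdir("../") at import time makes the suite depend on being launched from the test directory and shifts the working directory for everything that runs after it. A path-based alternative, assuming vocabtool sits one level above test/ as the import suggests:

import os
import sys

# Make the package importable without changing the process-wide cwd
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from vocabtool import core  # noqa: E402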
c464c509b69b4c61364b62ac584619744c4f6113
|
into/backends/tests/test_aws.py
|
into/backends/tests/test_aws.py
|
import pytest
from into import into, resource, S3, discover, CSV, s3
import pandas as pd
import pandas.util.testing as tm
import datashape
sa = pytest.importorskip('sqlalchemy')
pytest.importorskip('psycopg2')
pytest.importorskip('redshift_sqlalchemy')
tips_uri = 's3://nyqpug/tips.csv'
def test_s3_resource():
csv = resource(tips_uri)
assert isinstance(csv, S3)
def test_s3_discover():
csv = resource(tips_uri)
assert isinstance(discover(csv), datashape.DataShape)
@pytest.mark.xfail(raises=NotImplementedError,
reason='not implemented yet')
def test_frame_to_s3():
df = pd.DataFrame({
'a': list('abc'),
'b': [1, 2, 3],
'c': [1.0, 2.0, 3.0]
})[['a', 'b', 'c']]
to_this = s3(CSV)('s3://nyqpug/test.csv')
s3_csv = into(to_this, df)
tm.assert_frame_equal(into(pd.DataFrame, s3_csv), df)
@pytest.mark.xfail(raises=NotImplementedError,
reason=('need to figure out how to get a redshift instance '
'for testing'))
def test_s3_to_redshift():
redshift_uri = ('redshift+psycopg2://username@host.amazonaws.com:5439/'
'database::t')
table = into(redshift_uri, 's3://bucket/csvdir')
assert isinstance(table, sa.Table)
assert table.name == 't'
|
Add aws s3 into tests
|
Add aws s3 into tests
|
Python
|
bsd-3-clause
|
blaze/odo,Dannnno/odo,quantopian/odo,cowlicks/odo,ywang007/odo,alexmojaki/odo,cowlicks/odo,quantopian/odo,blaze/odo,alexmojaki/odo,ContinuumIO/odo,Dannnno/odo,cpcloud/odo,cpcloud/odo,ContinuumIO/odo,ywang007/odo
|
Add aws s3 into tests
|
import pytest
from into import into, resource, S3, discover, CSV, s3
import pandas as pd
import pandas.util.testing as tm
import datashape
sa = pytest.importorskip('sqlalchemy')
pytest.importorskip('psycopg2')
pytest.importorskip('redshift_sqlalchemy')
tips_uri = 's3://nyqpug/tips.csv'
def test_s3_resource():
csv = resource(tips_uri)
assert isinstance(csv, S3)
def test_s3_discover():
csv = resource(tips_uri)
assert isinstance(discover(csv), datashape.DataShape)
@pytest.mark.xfail(raises=NotImplementedError,
reason='not implemented yet')
def test_frame_to_s3():
df = pd.DataFrame({
'a': list('abc'),
'b': [1, 2, 3],
'c': [1.0, 2.0, 3.0]
})[['a', 'b', 'c']]
to_this = s3(CSV)('s3://nyqpug/test.csv')
s3_csv = into(to_this, df)
tm.assert_frame_equal(into(pd.DataFrame, s3_csv), df)
@pytest.mark.xfail(raises=NotImplementedError,
reason=('need to figure out how to get a redshift instance '
'for testing'))
def test_s3_to_redshift():
redshift_uri = ('redshift+psycopg2://username@host.amazonaws.com:5439/'
'database::t')
table = into(redshift_uri, 's3://bucket/csvdir')
assert isinstance(table, sa.Table)
assert table.name == 't'
|
<commit_before><commit_msg>Add aws s3 into tests<commit_after>
|
import pytest
from into import into, resource, S3, discover, CSV, s3
import pandas as pd
import pandas.util.testing as tm
import datashape
sa = pytest.importorskip('sqlalchemy')
pytest.importorskip('psycopg2')
pytest.importorskip('redshift_sqlalchemy')
tips_uri = 's3://nyqpug/tips.csv'
def test_s3_resource():
csv = resource(tips_uri)
assert isinstance(csv, S3)
def test_s3_discover():
csv = resource(tips_uri)
assert isinstance(discover(csv), datashape.DataShape)
@pytest.mark.xfail(raises=NotImplementedError,
reason='not implemented yet')
def test_frame_to_s3():
df = pd.DataFrame({
'a': list('abc'),
'b': [1, 2, 3],
'c': [1.0, 2.0, 3.0]
})[['a', 'b', 'c']]
to_this = s3(CSV)('s3://nyqpug/test.csv')
s3_csv = into(to_this, df)
tm.assert_frame_equal(into(pd.DataFrame, s3_csv), df)
@pytest.mark.xfail(raises=NotImplementedError,
reason=('need to figure out how to get a redshift instance '
'for testing'))
def test_s3_to_redshift():
redshift_uri = ('redshift+psycopg2://username@host.amazonaws.com:5439/'
'database::t')
table = into(redshift_uri, 's3://bucket/csvdir')
assert isinstance(table, sa.Table)
assert table.name == 't'
|
Add aws s3 into testsimport pytest
from into import into, resource, S3, discover, CSV, s3
import pandas as pd
import pandas.util.testing as tm
import datashape
sa = pytest.importorskip('sqlalchemy')
pytest.importorskip('psycopg2')
pytest.importorskip('redshift_sqlalchemy')
tips_uri = 's3://nyqpug/tips.csv'
def test_s3_resource():
csv = resource(tips_uri)
assert isinstance(csv, S3)
def test_s3_discover():
csv = resource(tips_uri)
assert isinstance(discover(csv), datashape.DataShape)
@pytest.mark.xfail(raises=NotImplementedError,
reason='not implemented yet')
def test_frame_to_s3():
df = pd.DataFrame({
'a': list('abc'),
'b': [1, 2, 3],
'c': [1.0, 2.0, 3.0]
})[['a', 'b', 'c']]
to_this = s3(CSV)('s3://nyqpug/test.csv')
s3_csv = into(to_this, df)
tm.assert_frame_equal(into(pd.DataFrame, s3_csv), df)
@pytest.mark.xfail(raises=NotImplementedError,
reason=('need to figure out how to get a redshift instance '
'for testing'))
def test_s3_to_redshift():
redshift_uri = ('redshift+psycopg2://username@host.amazonaws.com:5439/'
'database::t')
table = into(redshift_uri, 's3://bucket/csvdir')
assert isinstance(table, sa.Table)
assert table.name == 't'
|
<commit_before><commit_msg>Add aws s3 into tests<commit_after>import pytest
from into import into, resource, S3, discover, CSV, s3
import pandas as pd
import pandas.util.testing as tm
import datashape
sa = pytest.importorskip('sqlalchemy')
pytest.importorskip('psycopg2')
pytest.importorskip('redshift_sqlalchemy')
tips_uri = 's3://nyqpug/tips.csv'
def test_s3_resource():
csv = resource(tips_uri)
assert isinstance(csv, S3)
def test_s3_discover():
csv = resource(tips_uri)
assert isinstance(discover(csv), datashape.DataShape)
@pytest.mark.xfail(raises=NotImplementedError,
reason='not implemented yet')
def test_frame_to_s3():
df = pd.DataFrame({
'a': list('abc'),
'b': [1, 2, 3],
'c': [1.0, 2.0, 3.0]
})[['a', 'b', 'c']]
to_this = s3(CSV)('s3://nyqpug/test.csv')
s3_csv = into(to_this, df)
tm.assert_frame_equal(into(pd.DataFrame, s3_csv), df)
@pytest.mark.xfail(raises=NotImplementedError,
reason=('need to figure out how to get a redshift instance '
'for testing'))
def test_s3_to_redshift():
redshift_uri = ('redshift+psycopg2://username@host.amazonaws.com:5439/'
'database::t')
table = into(redshift_uri, 's3://bucket/csvdir')
assert isinstance(table, sa.Table)
assert table.name == 't'
|
|
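The importorskip guards above keep the module collectable on machines without the optional backends: the skip happens at import, so none of the tests below the guard error out. A minimal reusable sketch of the pattern (the sqlite engine here is illustrative and unrelated to the S3 fixtures above):

import pytest

sa = pytest.importorskip('sqlalchemy')  # whole module skips if missing

def test_engine_roundtrip(tmp_path):
    engine = sa.create_engine('sqlite:///%s' % (tmp_path / 'db.sqlite'))
    with engine.connect() as conn:
        assert conn.execute(sa.text('select 1')).scalar() == 1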
1fb389bde5a6cd8812054046c8b76915d485e54c
|
tdb/utils/fix_input_file_spaces.py
|
tdb/utils/fix_input_file_spaces.py
|
import os, subprocess
import re
def fix_input_file_spaces():
path = '../../fludata/VIDRL-Melbourne-WHO-CC/raw-data/'
for ab in os.listdir(path):
ab_path = '{}{}/'.format(path, ab)
for subtype in os.listdir(ab_path):
subtype_path = '{}{}/'.format(ab_path, subtype)
for assay in os.listdir(subtype_path):
complete_path = '{}{}/'.format(subtype_path, assay)
for fname in os.listdir(complete_path):
fpath = complete_path + fname
if ' ' in fname:
new_fname = fname.replace(' ', '-')
fstem = fname.split('.')[0]
fext = fname.split('.')[1]
fstem = re.escape(fstem)
fname = "{}.{}".format(fstem,fext)
command = 'mv {}{} {}{}'.format(complete_path, fname, complete_path, new_fname)
print command
subprocess.call(command, shell=True)
if __name__=="__main__":
fix_input_file_spaces()
|
Add name space replacement utility.
|
Add name space replacement utility.
|
Python
|
agpl-3.0
|
nextstrain/fauna,blab/nextstrain-db,nextstrain/fauna,blab/nextstrain-db
|
Add name space replacement utility.
|
import os, subprocess
import re
def fix_input_file_spaces():
path = '../../fludata/VIDRL-Melbourne-WHO-CC/raw-data/'
for ab in os.listdir(path):
ab_path = '{}{}/'.format(path, ab)
for subtype in os.listdir(ab_path):
subtype_path = '{}{}/'.format(ab_path, subtype)
for assay in os.listdir(subtype_path):
complete_path = '{}{}/'.format(subtype_path, assay)
for fname in os.listdir(complete_path):
fpath = complete_path + fname
if ' ' in fname:
new_fname = fname.replace(' ', '-')
fstem = fname.split('.')[0]
fext = fname.split('.')[1]
fstem = re.escape(fstem)
fname = "{}.{}".format(fstem,fext)
command = 'mv {}{} {}{}'.format(complete_path, fname, complete_path, new_fname)
print command
subprocess.call(command, shell=True)
if __name__=="__main__":
fix_input_file_spaces()
|
<commit_before><commit_msg>Add name space replacement utility.<commit_after>
|
import os, subprocess
import re
def fix_input_file_spaces():
path = '../../fludata/VIDRL-Melbourne-WHO-CC/raw-data/'
for ab in os.listdir(path):
ab_path = '{}{}/'.format(path, ab)
for subtype in os.listdir(ab_path):
subtype_path = '{}{}/'.format(ab_path, subtype)
for assay in os.listdir(subtype_path):
complete_path = '{}{}/'.format(subtype_path, assay)
for fname in os.listdir(complete_path):
fpath = complete_path + fname
if ' ' in fname:
new_fname = fname.replace(' ', '-')
fstem = fname.split('.')[0]
fext = fname.split('.')[1]
fstem = re.escape(fstem)
fname = "{}.{}".format(fstem,fext)
command = 'mv {}{} {}{}'.format(complete_path, fname, complete_path, new_fname)
print command
subprocess.call(command, shell=True)
if __name__=="__main__":
fix_input_file_spaces()
|
Add name space replacement utility.import os, subprocess
import re
def fix_input_file_spaces():
path = '../../fludata/VIDRL-Melbourne-WHO-CC/raw-data/'
for ab in os.listdir(path):
ab_path = '{}{}/'.format(path, ab)
for subtype in os.listdir(ab_path):
subtype_path = '{}{}/'.format(ab_path, subtype)
for assay in os.listdir(subtype_path):
complete_path = '{}{}/'.format(subtype_path, assay)
for fname in os.listdir(complete_path):
fpath = complete_path + fname
if ' ' in fname:
new_fname = fname.replace(' ', '-')
fstem = fname.split('.')[0]
fext = fname.split('.')[1]
fstem = re.escape(fstem)
fname = "{}.{}".format(fstem,fext)
command = 'mv {}{} {}{}'.format(complete_path, fname, complete_path, new_fname)
print command
subprocess.call(command, shell=True)
if __name__=="__main__":
fix_input_file_spaces()
|
<commit_before><commit_msg>Add name space replacement utility.<commit_after>import os, subprocess
import re
def fix_input_file_spaces():
path = '../../fludata/VIDRL-Melbourne-WHO-CC/raw-data/'
for ab in os.listdir(path):
ab_path = '{}{}/'.format(path, ab)
for subtype in os.listdir(ab_path):
subtype_path = '{}{}/'.format(ab_path, subtype)
for assay in os.listdir(subtype_path):
complete_path = '{}{}/'.format(subtype_path, assay)
for fname in os.listdir(complete_path):
fpath = complete_path + fname
if ' ' in fname:
new_fname = fname.replace(' ', '-')
fstem = fname.split('.')[0]
fext = fname.split('.')[1]
fstem = re.escape(fstem)
fname = "{}.{}".format(fstem,fext)
command = 'mv {}{} {}{}'.format(complete_path, fname, complete_path, new_fname)
print command
subprocess.call(command, shell=True)
if __name__=="__main__":
fix_input_file_spaces()
|
|
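The utility above escapes the source name with re.escape and shells out to mv. A sketch of the same rename done with os.rename, which needs no shell quoting at all; the root directory here is a placeholder rather than the script's hard-coded path:

import os

def replace_spaces(root):
    # Walk the tree and replace spaces in file names with hyphens.
    for dirpath, _dirnames, filenames in os.walk(root):
        for fname in filenames:
            if ' ' in fname:
                src = os.path.join(dirpath, fname)
                dst = os.path.join(dirpath, fname.replace(' ', '-'))
                os.rename(src, dst)  # no subprocess, so no escaping needed

if __name__ == '__main__':
    replace_spaces('raw-data')  # placeholder root directory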
7f01e9e622eb14db003d125bd7041671452fc9c6
|
hacks/gpiotesting.py
|
hacks/gpiotesting.py
|
# from www.thirdeyevis.com/pi-page-2.php
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
GPIO.setup(11, GPIO.OUT)
GPIO.setup(12, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(13, GPIO.OUT)
def blinkity():
for i in range(0,5):
GPIO.output(13, True)
time.sleep(.5)
GPIO.output(7, True)
time.sleep(.5)
GPIO.output(7, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(13, False)
time.sleep(.2)
def upity():
for i in range(0,15):
GPIO.output(13, True)
time.sleep(.2)
GPIO.output(7, True)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
GPIO.output(7, False)
GPIO.output(13, False)
time.sleep(.5)
#while True:
print("wait")
GPIO.wait_for_edge(12, GPIO.FALLING)
print("got")
blinkity()
GPIO.wait_for_edge(16, GPIO.FALLING)
upity()
GPIO.cleanup()
|
Add an early gpio testing script
|
Add an early gpio testing script
|
Python
|
mit
|
joadavis/rpi-coding
|
Add an early gpio testing script
|
# from www.thirdeyevis.com/pi-page-2.php
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
GPIO.setup(11, GPIO.OUT)
GPIO.setup(12, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(13, GPIO.OUT)
def blinkity():
for i in range(0,5):
GPIO.output(13, True)
time.sleep(.5)
GPIO.output(7, True)
time.sleep(.5)
GPIO.output(7, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(13, False)
time.sleep(.2)
def upity():
for i in range(0,15):
GPIO.output(13, True)
time.sleep(.2)
GPIO.output(7, True)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
GPIO.output(7, False)
GPIO.output(13, False)
time.sleep(.5)
#while True:
print("wait")
GPIO.wait_for_edge(12, GPIO.FALLING)
print("got")
blinkity()
GPIO.wait_for_edge(16, GPIO.FALLING)
upity()
GPIO.cleanup()
|
<commit_before><commit_msg>Add an early gpio testing script<commit_after>
|
# from www.thirdeyevis.com/pi-page-2.php
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
GPIO.setup(11, GPIO.OUT)
GPIO.setup(12, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(13, GPIO.OUT)
def blinkity():
for i in range(0,5):
GPIO.output(13, True)
time.sleep(.5)
GPIO.output(7, True)
time.sleep(.5)
GPIO.output(7, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(13, False)
time.sleep(.2)
def upity():
for i in range(0,15):
GPIO.output(13, True)
time.sleep(.2)
GPIO.output(7, True)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
GPIO.output(7, False)
GPIO.output(13, False)
time.sleep(.5)
#while True:
print("wait")
GPIO.wait_for_edge(12, GPIO.FALLING)
print("got")
blinkity()
GPIO.wait_for_edge(16, GPIO.FALLING)
upity()
GPIO.cleanup()
|
Add an early gpio testing script# from www.thirdeyevis.com/pi-page-2.php
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
GPIO.setup(11, GPIO.OUT)
GPIO.setup(12, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(13, GPIO.OUT)
def blinkity():
for i in range(0,5):
GPIO.output(13, True)
time.sleep(.5)
GPIO.output(7, True)
time.sleep(.5)
GPIO.output(7, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(13, False)
time.sleep(.2)
def upity():
for i in range(0,15):
GPIO.output(13, True)
time.sleep(.2)
GPIO.output(7, True)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
GPIO.output(7, False)
GPIO.output(13, False)
time.sleep(.5)
#while True:
print("wait")
GPIO.wait_for_edge(12, GPIO.FALLING)
print("got")
blinkity()
GPIO.wait_for_edge(16, GPIO.FALLING)
upity()
GPIO.cleanup()
|
<commit_before><commit_msg>Add an early gpio testing script<commit_after># from www.thirdeyevis.com/pi-page-2.php
import RPi.GPIO as GPIO
import time
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
GPIO.setup(11, GPIO.OUT)
GPIO.setup(12, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(13, GPIO.OUT)
def blinkity():
for i in range(0,5):
GPIO.output(13, True)
time.sleep(.5)
GPIO.output(7, True)
time.sleep(.5)
GPIO.output(7, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
time.sleep(.2)
GPIO.output(13, False)
time.sleep(.2)
def upity():
for i in range(0,15):
GPIO.output(13, True)
time.sleep(.2)
GPIO.output(7, True)
time.sleep(.2)
GPIO.output(11, True)
time.sleep(.5)
GPIO.output(11, False)
GPIO.output(7, False)
GPIO.output(13, False)
time.sleep(.5)
#while True:
print("wait")
GPIO.wait_for_edge(12, GPIO.FALLING)
print("got")
blinkity()
GPIO.wait_for_edge(16, GPIO.FALLING)
upity()
GPIO.cleanup()
|
|
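wait_for_edge blocks the main thread on one pin at a time, so the script above services the two buttons strictly in sequence. RPi.GPIO also provides add_event_detect, which fires a callback from a background thread; a sketch wired to the same input pins:

import time
import RPi.GPIO as GPIO

GPIO.setmode(GPIO.BOARD)
GPIO.setup(12, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(16, GPIO.IN, pull_up_down=GPIO.PUD_UP)

def on_press(channel):
    # Invoked from RPi.GPIO's polling thread on each falling edge.
    print("button on pin %d pressed" % channel)

# bouncetime (milliseconds) suppresses mechanical switch chatter.
GPIO.add_event_detect(12, GPIO.FALLING, callback=on_press, bouncetime=200)
GPIO.add_event_detect(16, GPIO.FALLING, callback=on_press, bouncetime=200)

try:
    while True:
        time.sleep(1)  # main thread stays free; callbacks fire independently
finally:
    GPIO.cleanup()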
22dc87589131a86b4d2e38eaf67ec4601b1436ee
|
tests/views/test_blog_post_page.py
|
tests/views/test_blog_post_page.py
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, PostData
import urllib2
class TestBlogPages(PMGLiveServerTestCase):
def setUp(self):
super(TestBlogPages, self).setUp()
self.fx = dbfixture.data(PostData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestBlogPages, self).tearDown()
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
|
Add 'smoke test' for single blog post page
|
Add 'smoke test' for single blog post page
|
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
Add 'smoke test' for single blog post page
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, PostData
import urllib2
class TestBlogPages(PMGLiveServerTestCase):
def setUp(self):
super(TestBlogPages, self).setUp()
self.fx = dbfixture.data(PostData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestBlogPages, self).tearDown()
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
|
<commit_before><commit_msg>Add 'smoke test' for single blog post page<commit_after>
|
from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, PostData
import urllib2
class TestBlogPages(PMGLiveServerTestCase):
def setUp(self):
super(TestBlogPages, self).setUp()
self.fx = dbfixture.data(PostData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestBlogPages, self).tearDown()
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
|
Add 'smoke test' for single blog post pagefrom tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, PostData
import urllib2
class TestBlogPages(PMGLiveServerTestCase):
def setUp(self):
super(TestBlogPages, self).setUp()
self.fx = dbfixture.data(PostData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestBlogPages, self).tearDown()
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
|
<commit_before><commit_msg>Add 'smoke test' for single blog post page<commit_after>from tests import PMGLiveServerTestCase
from tests.fixtures import dbfixture, PostData
import urllib2
class TestBlogPages(PMGLiveServerTestCase):
def setUp(self):
super(TestBlogPages, self).setUp()
self.fx = dbfixture.data(PostData)
self.fx.setup()
def tearDown(self):
self.fx.teardown()
super(TestBlogPages, self).tearDown()
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
def test_blog_post_page(self):
"""
Test blog post page (http://pmg.test:5000/blog/<slug>)
"""
post = self.fx.PostData.the_week_ahead
self.get_page_contents(
"http://pmg.test:5000/blog/%s/"
% post.slug
)
self.assertIn(post.title, self.html)
self.assertIn(post.body[0:100], self.html)
self.assertIn('That week in Parliament', self.html)
|
|
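Note that the test class above defines test_blog_post_page twice with identical bodies; Python silently rebinds the attribute, so only one copy is collected and run. A tiny sketch of the shadowing behaviour:

class Demo(object):
    def method(self):
        return 'first'

    def method(self):  # same name: rebinds the class attribute
        return 'second'

# Only the second definition survives on the class.
assert Demo().method() == 'second'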
58bec298ac5e400c9d33f440130ad777e162b410
|
numba/tests/test_dyn_array.py
|
numba/tests/test_dyn_array.py
|
from __future__ import print_function, absolute_import, division
import numpy as np
from numba import unittest_support as unittest
from numba import njit
class TestDynArray(unittest.TestCase):
def test_empty_1d(self):
@njit
def foo(n):
arr = np.empty(n)
for i in range(n):
arr[i] = i
return arr
n = 3
arr = foo(n)
np.testing.assert_equal(np.arange(n), arr)
self.assertEqual(arr.size, n)
self.assertEqual(arr.shape, (n,))
self.assertEqual(arr.dtype, np.dtype(np.float64))
self.assertEqual(arr.strides, (np.dtype(np.float64).itemsize,))
arr.fill(123) # test writability
np.testing.assert_equal(123, arr)
del arr
if __name__ == "__main__":
unittest.main()
|
Add test for np.empty 1d case
|
Add test for np.empty 1d case
|
Python
|
bsd-2-clause
|
IntelLabs/numba,stonebig/numba,jriehl/numba,pitrou/numba,stefanseefeld/numba,IntelLabs/numba,ssarangi/numba,numba/numba,ssarangi/numba,jriehl/numba,pitrou/numba,sklam/numba,stuartarchibald/numba,jriehl/numba,pombredanne/numba,stonebig/numba,pitrou/numba,cpcloud/numba,pitrou/numba,seibert/numba,cpcloud/numba,cpcloud/numba,seibert/numba,cpcloud/numba,gmarkall/numba,stonebig/numba,pombredanne/numba,pombredanne/numba,sklam/numba,pitrou/numba,numba/numba,stefanseefeld/numba,gdementen/numba,IntelLabs/numba,stefanseefeld/numba,stefanseefeld/numba,numba/numba,sklam/numba,gdementen/numba,jriehl/numba,numba/numba,IntelLabs/numba,gmarkall/numba,stonebig/numba,ssarangi/numba,ssarangi/numba,gmarkall/numba,jriehl/numba,stuartarchibald/numba,gdementen/numba,cpcloud/numba,gdementen/numba,stefanseefeld/numba,pombredanne/numba,stonebig/numba,GaZ3ll3/numba,GaZ3ll3/numba,sklam/numba,numba/numba,GaZ3ll3/numba,seibert/numba,gmarkall/numba,GaZ3ll3/numba,gdementen/numba,stuartarchibald/numba,sklam/numba,gmarkall/numba,GaZ3ll3/numba,ssarangi/numba,stuartarchibald/numba,stuartarchibald/numba,pombredanne/numba,seibert/numba,seibert/numba,IntelLabs/numba
|
Add test for np.empty 1d case
|
from __future__ import print_function, absolute_import, division
import numpy as np
from numba import unittest_support as unittest
from numba import njit
class TestDynArray(unittest.TestCase):
def test_empty_1d(self):
@njit
def foo(n):
arr = np.empty(n)
for i in range(n):
arr[i] = i
return arr
n = 3
arr = foo(n)
np.testing.assert_equal(np.arange(n), arr)
self.assertEqual(arr.size, n)
self.assertEqual(arr.shape, (n,))
self.assertEqual(arr.dtype, np.dtype(np.float64))
self.assertEqual(arr.strides, (np.dtype(np.float64).itemsize,))
arr.fill(123) # test writability
np.testing.assert_equal(123, arr)
del arr
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add test for np.empty 1d case<commit_after>
|
from __future__ import print_function, absolute_import, division
import numpy as np
from numba import unittest_support as unittest
from numba import njit
class TestDynArray(unittest.TestCase):
def test_empty_1d(self):
@njit
def foo(n):
arr = np.empty(n)
for i in range(n):
arr[i] = i
return arr
n = 3
arr = foo(n)
np.testing.assert_equal(np.arange(n), arr)
self.assertEqual(arr.size, n)
self.assertEqual(arr.shape, (n,))
self.assertEqual(arr.dtype, np.dtype(np.float64))
self.assertEqual(arr.strides, (np.dtype(np.float64).itemsize,))
arr.fill(123) # test writability
np.testing.assert_equal(123, arr)
del arr
if __name__ == "__main__":
unittest.main()
|
Add test for np.empty 1d casefrom __future__ import print_function, absolute_import, division
import numpy as np
from numba import unittest_support as unittest
from numba import njit
class TestDynArray(unittest.TestCase):
def test_empty_1d(self):
@njit
def foo(n):
arr = np.empty(n)
for i in range(n):
arr[i] = i
return arr
n = 3
arr = foo(n)
np.testing.assert_equal(np.arange(n), arr)
self.assertEqual(arr.size, n)
self.assertEqual(arr.shape, (n,))
self.assertEqual(arr.dtype, np.dtype(np.float64))
self.assertEqual(arr.strides, (np.dtype(np.float64).itemsize,))
arr.fill(123) # test writability
np.testing.assert_equal(123, arr)
del arr
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add test for np.empty 1d case<commit_after>from __future__ import print_function, absolute_import, division
import numpy as np
from numba import unittest_support as unittest
from numba import njit
class TestDynArray(unittest.TestCase):
def test_empty_1d(self):
@njit
def foo(n):
arr = np.empty(n)
for i in range(n):
arr[i] = i
return arr
n = 3
arr = foo(n)
np.testing.assert_equal(np.arange(n), arr)
self.assertEqual(arr.size, n)
self.assertEqual(arr.shape, (n,))
self.assertEqual(arr.dtype, np.dtype(np.float64))
self.assertEqual(arr.strides, (np.dtype(np.float64).itemsize,))
arr.fill(123) # test writability
np.testing.assert_equal(123, arr)
del arr
if __name__ == "__main__":
unittest.main()
|
|
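The allocate-inside-the-jit pattern extends to other constructors and to higher dimensions. A sketch, assuming a numba version that supports np.zeros with a shape tuple in nopython mode:

import numpy as np
from numba import njit

@njit
def outer_product(n):
    # Allocate inside the compiled function, then fill element-wise.
    out = np.zeros((n, n))
    for i in range(n):
        for j in range(n):
            out[i, j] = i * j
    return out

expected = np.arange(4).reshape(4, 1) * np.arange(4)
np.testing.assert_array_equal(outer_product(4), expected)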
47a11b49d79829e97ed8ca0ca4d6dde115b96d08
|
test/main_test.py
|
test/main_test.py
|
import theanets
import numpy as np
import util
class TestExperiment(util.MNIST):
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Autoencoder, layers=(self.DIGIT_SIZE, 2, self.DIGIT_SIZE))
assert isinstance(exp.network, theanets.Autoencoder)
def test_create_classifier(self):
exp = theanets.Experiment(
theanets.Classifier, layers=(self.DIGIT_SIZE, 2, 3))
assert isinstance(exp.network, theanets.Classifier)
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Regressor, layers=(self.DIGIT_SIZE, 2, 4))
assert isinstance(exp.network, theanets.Regressor)
|
Add a few basic tests for Experiment code.
|
Add a few basic tests for Experiment code.
|
Python
|
mit
|
lmjohns3/theanets,devdoer/theanets,chrinide/theanets
|
Add a few basic tests for Experiment code.
|
import theanets
import numpy as np
import util
class TestExperiment(util.MNIST):
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Autoencoder, layers=(self.DIGIT_SIZE, 2, self.DIGIT_SIZE))
assert isinstance(exp.network, theanets.Autoencoder)
def test_create_classifier(self):
exp = theanets.Experiment(
theanets.Classifier, layers=(self.DIGIT_SIZE, 2, 3))
assert isinstance(exp.network, theanets.Classifier)
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Regressor, layers=(self.DIGIT_SIZE, 2, 4))
assert isinstance(exp.network, theanets.Regressor)
|
<commit_before><commit_msg>Add a few basic tests for Experiment code.<commit_after>
|
import theanets
import numpy as np
import util
class TestExperiment(util.MNIST):
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Autoencoder, layers=(self.DIGIT_SIZE, 2, self.DIGIT_SIZE))
assert isinstance(exp.network, theanets.Autoencoder)
def test_create_classifier(self):
exp = theanets.Experiment(
theanets.Classifier, layers=(self.DIGIT_SIZE, 2, 3))
assert isinstance(exp.network, theanets.Classifier)
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Regressor, layers=(self.DIGIT_SIZE, 2, 4))
assert isinstance(exp.network, theanets.Regressor)
|
Add a few basic tests for Experiment code.import theanets
import numpy as np
import util
class TestExperiment(util.MNIST):
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Autoencoder, layers=(self.DIGIT_SIZE, 2, self.DIGIT_SIZE))
assert isinstance(exp.network, theanets.Autoencoder)
def test_create_classifier(self):
exp = theanets.Experiment(
theanets.Classifier, layers=(self.DIGIT_SIZE, 2, 3))
assert isinstance(exp.network, theanets.Classifier)
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Regressor, layers=(self.DIGIT_SIZE, 2, 4))
assert isinstance(exp.network, theanets.Regressor)
|
<commit_before><commit_msg>Add a few basic tests for Experiment code.<commit_after>import theanets
import numpy as np
import util
class TestExperiment(util.MNIST):
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Autoencoder, layers=(self.DIGIT_SIZE, 2, self.DIGIT_SIZE))
assert isinstance(exp.network, theanets.Autoencoder)
def test_create_classifier(self):
exp = theanets.Experiment(
theanets.Classifier, layers=(self.DIGIT_SIZE, 2, 3))
assert isinstance(exp.network, theanets.Classifier)
def test_create_autoencoder(self):
exp = theanets.Experiment(
theanets.Regressor, layers=(self.DIGIT_SIZE, 2, 4))
assert isinstance(exp.network, theanets.Regressor)
|
|
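Two of the three methods above share the name test_create_autoencoder, so the Regressor check shadows the Autoencoder one and only two tests actually run. A table-driven sketch that sidesteps the shadowing; it reuses only names shown in the record, with 784 standing in for the fixture's DIGIT_SIZE:

import theanets

CASES = [
    (theanets.Autoencoder, (784, 2, 784)),  # 784 stands in for DIGIT_SIZE
    (theanets.Classifier, (784, 2, 3)),
    (theanets.Regressor, (784, 2, 4)),
]

def test_create_networks():
    for cls, layers in CASES:
        exp = theanets.Experiment(cls, layers=layers)
        assert isinstance(exp.network, cls), cls.__name__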
a506d8d45d0824b9b70af35831c1bde69906617e
|
test/test_soft.py
|
test/test_soft.py
|
import os
import base64
import struct
import tempfile
import unittest
from u2flib_host.soft import SoftU2FDevice
from u2flib_host.constants import INS_ENROLL, INS_SIGN
CLIENT_PARAM = b'clientABCDEFGHIJKLMNOPQRSTUVWXYZ' # 32 bytes
APP_PARAM = b'test_SoftU2FDevice0123456789ABCD' # 32 bytes
class TestSoftU2FDevice(unittest.TestCase):
def setUp(self):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(b'{"counter": 0, "keys": {}}')
self.device_path = f.name
def tearDown(self):
os.unlink(self.device_path)
def test_init(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.data['counter'], 0)
self.assertEqual(dev.data['keys'], {})
def test_get_supported_versions(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.get_supported_versions(), ['U2F_V2'])
def test_registeration(self):
dev = SoftU2FDevice(self.device_path)
request = struct.pack('32s 32s', CLIENT_PARAM, APP_PARAM)
response = dev.send_apdu(INS_ENROLL, data=request)
self.assertEqual(dev.data['counter'], 0)
self.assertTrue(len(dev.data['keys']), 1)
pub_key, key_handle_len, key_handle, cert, signature = struct.unpack('x 65s B 64s %is 32s' % (len(response)-(1+65+1+64+32),), response)
self.assertEqual(len(key_handle), key_handle_len)
kh_hex = base64.b16encode(key_handle).decode('ascii')
self.assertIn(kh_hex, dev.data['keys'])
self.assertEqual(base64.b16decode(dev.data['keys'][kh_hex]['app_param']), APP_PARAM)
self.assertEqual(dev.data['keys'][kh_hex]['priv_key'].split('\n')[0],
'-----BEGIN PRIVATE KEY-----')
request = struct.pack('32s 32s B %is' % key_handle_len,
CLIENT_PARAM, APP_PARAM, key_handle_len, key_handle)
response = dev.send_apdu(INS_SIGN, data=request)
self.assertEqual(dev.data['counter'], 1)
touch, counter, signature = struct.unpack('>? I %is' % (len(response)-(1+4),), response)
self.assertTrue(touch)
self.assertEqual(counter, 1)
|
Add rudimentary unit test for SoftU2FDevice
|
Add rudimentary unit test for SoftU2FDevice
|
Python
|
bsd-2-clause
|
Yubico/python-u2flib-host
|
Add rudimentary unit test for SoftU2FDevice
|
import os
import base64
import struct
import tempfile
import unittest
from u2flib_host.soft import SoftU2FDevice
from u2flib_host.constants import INS_ENROLL, INS_SIGN
CLIENT_PARAM = b'clientABCDEFGHIJKLMNOPQRSTUVWXYZ' # 32 bytes
APP_PARAM = b'test_SoftU2FDevice0123456789ABCD' # 32 bytes
class TestSoftU2FDevice(unittest.TestCase):
def setUp(self):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(b'{"counter": 0, "keys": {}}')
self.device_path = f.name
def tearDown(self):
os.unlink(self.device_path)
def test_init(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.data['counter'], 0)
self.assertEqual(dev.data['keys'], {})
def test_get_supported_versions(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.get_supported_versions(), ['U2F_V2'])
def test_registeration(self):
dev = SoftU2FDevice(self.device_path)
request = struct.pack('32s 32s', CLIENT_PARAM, APP_PARAM)
response = dev.send_apdu(INS_ENROLL, data=request)
self.assertEqual(dev.data['counter'], 0)
self.assertTrue(len(dev.data['keys']), 1)
pub_key, key_handle_len, key_handle, cert, signature = struct.unpack('x 65s B 64s %is 32s' % (len(response)-(1+65+1+64+32),), response)
self.assertEqual(len(key_handle), key_handle_len)
kh_hex = base64.b16encode(key_handle).decode('ascii')
self.assertIn(kh_hex, dev.data['keys'])
self.assertEqual(base64.b16decode(dev.data['keys'][kh_hex]['app_param']), APP_PARAM)
self.assertEqual(dev.data['keys'][kh_hex]['priv_key'].split('\n')[0],
'-----BEGIN PRIVATE KEY-----')
request = struct.pack('32s 32s B %is' % key_handle_len,
CLIENT_PARAM, APP_PARAM, key_handle_len, key_handle)
response = dev.send_apdu(INS_SIGN, data=request)
self.assertEqual(dev.data['counter'], 1)
touch, counter, signature = struct.unpack('>? I %is' % (len(response)-(1+4),), response)
self.assertTrue(touch)
self.assertEqual(counter, 1)
|
<commit_before><commit_msg>Add rudimentary unit test for SoftU2FDevice<commit_after>
|
import os
import base64
import struct
import tempfile
import unittest
from u2flib_host.soft import SoftU2FDevice
from u2flib_host.constants import INS_ENROLL, INS_SIGN
CLIENT_PARAM = b'clientABCDEFGHIJKLMNOPQRSTUVWXYZ' # 32 bytes
APP_PARAM = b'test_SoftU2FDevice0123456789ABCD' # 32 bytes
class TestSoftU2FDevice(unittest.TestCase):
def setUp(self):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(b'{"counter": 0, "keys": {}}')
self.device_path = f.name
def tearDown(self):
os.unlink(self.device_path)
def test_init(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.data['counter'], 0)
self.assertEqual(dev.data['keys'], {})
def test_get_supported_versions(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.get_supported_versions(), ['U2F_V2'])
def test_registeration(self):
dev = SoftU2FDevice(self.device_path)
request = struct.pack('32s 32s', CLIENT_PARAM, APP_PARAM)
response = dev.send_apdu(INS_ENROLL, data=request)
self.assertEqual(dev.data['counter'], 0)
self.assertTrue(len(dev.data['keys']), 1)
pub_key, key_handle_len, key_handle, cert, signature = struct.unpack('x 65s B 64s %is 32s' % (len(response)-(1+65+1+64+32),), response)
self.assertEqual(len(key_handle), key_handle_len)
kh_hex = base64.b16encode(key_handle).decode('ascii')
self.assertIn(kh_hex, dev.data['keys'])
self.assertEqual(base64.b16decode(dev.data['keys'][kh_hex]['app_param']), APP_PARAM)
self.assertEqual(dev.data['keys'][kh_hex]['priv_key'].split('\n')[0],
'-----BEGIN PRIVATE KEY-----')
request = struct.pack('32s 32s B %is' % key_handle_len,
CLIENT_PARAM, APP_PARAM, key_handle_len, key_handle)
response = dev.send_apdu(INS_SIGN, data=request)
self.assertEqual(dev.data['counter'], 1)
touch, counter, signature = struct.unpack('>? I %is' % (len(response)-(1+4),), response)
self.assertTrue(touch)
self.assertEqual(counter, 1)
|
Add rudimentary unit test for SoftU2FDevice
import os
import base64
import struct
import tempfile
import unittest
from u2flib_host.soft import SoftU2FDevice
from u2flib_host.constants import INS_ENROLL, INS_SIGN
CLIENT_PARAM = b'clientABCDEFGHIJKLMNOPQRSTUVWXYZ' # 32 bytes
APP_PARAM = b'test_SoftU2FDevice0123456789ABCD' # 32 bytes
class TestSoftU2FDevice(unittest.TestCase):
def setUp(self):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(b'{"counter": 0, "keys": {}}')
self.device_path = f.name
def tearDown(self):
os.unlink(self.device_path)
def test_init(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.data['counter'], 0)
self.assertEqual(dev.data['keys'], {})
def test_get_supported_versions(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.get_supported_versions(), ['U2F_V2'])
def test_registeration(self):
dev = SoftU2FDevice(self.device_path)
request = struct.pack('32s 32s', CLIENT_PARAM, APP_PARAM)
response = dev.send_apdu(INS_ENROLL, data=request)
self.assertEqual(dev.data['counter'], 0)
self.assertTrue(len(dev.data['keys']), 1)
pub_key, key_handle_len, key_handle, cert, signature = struct.unpack('x 65s B 64s %is 32s' % (len(response)-(1+65+1+64+32),), response)
self.assertEqual(len(key_handle), key_handle_len)
kh_hex = base64.b16encode(key_handle).decode('ascii')
self.assertIn(kh_hex, dev.data['keys'])
self.assertEqual(base64.b16decode(dev.data['keys'][kh_hex]['app_param']), APP_PARAM)
self.assertEqual(dev.data['keys'][kh_hex]['priv_key'].split('\n')[0],
'-----BEGIN PRIVATE KEY-----')
request = struct.pack('32s 32s B %is' % key_handle_len,
CLIENT_PARAM, APP_PARAM, key_handle_len, key_handle)
response = dev.send_apdu(INS_SIGN, data=request)
self.assertEqual(dev.data['counter'], 1)
touch, counter, signature = struct.unpack('>? I %is' % (len(response)-(1+4),), response)
self.assertTrue(touch)
self.assertEqual(counter, 1)
|
<commit_before><commit_msg>Add rudimentary unit test for SoftU2FDevice<commit_after>
import os
import base64
import struct
import tempfile
import unittest
from u2flib_host.soft import SoftU2FDevice
from u2flib_host.constants import INS_ENROLL, INS_SIGN
CLIENT_PARAM = b'clientABCDEFGHIJKLMNOPQRSTUVWXYZ' # 32 bytes
APP_PARAM = b'test_SoftU2FDevice0123456789ABCD' # 32 bytes
class TestSoftU2FDevice(unittest.TestCase):
def setUp(self):
with tempfile.NamedTemporaryFile(delete=False) as f:
f.write(b'{"counter": 0, "keys": {}}')
self.device_path = f.name
def tearDown(self):
os.unlink(self.device_path)
def test_init(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.data['counter'], 0)
self.assertEqual(dev.data['keys'], {})
def test_get_supported_versions(self):
dev = SoftU2FDevice(self.device_path)
self.assertEqual(dev.get_supported_versions(), ['U2F_V2'])
def test_registeration(self):
dev = SoftU2FDevice(self.device_path)
request = struct.pack('32s 32s', CLIENT_PARAM, APP_PARAM)
response = dev.send_apdu(INS_ENROLL, data=request)
self.assertEqual(dev.data['counter'], 0)
self.assertTrue(len(dev.data['keys']), 1)
pub_key, key_handle_len, key_handle, cert, signature = struct.unpack('x 65s B 64s %is 32s' % (len(response)-(1+65+1+64+32),), response)
self.assertEqual(len(key_handle), key_handle_len)
kh_hex = base64.b16encode(key_handle).decode('ascii')
self.assertIn(kh_hex, dev.data['keys'])
self.assertEqual(base64.b16decode(dev.data['keys'][kh_hex]['app_param']), APP_PARAM)
self.assertEqual(dev.data['keys'][kh_hex]['priv_key'].split('\n')[0],
'-----BEGIN PRIVATE KEY-----')
request = struct.pack('32s 32s B %is' % key_handle_len,
CLIENT_PARAM, APP_PARAM, key_handle_len, key_handle)
response = dev.send_apdu(INS_SIGN, data=request)
self.assertEqual(dev.data['counter'], 1)
touch, counter, signature = struct.unpack('>? I %is' % (len(response)-(1+4),), response)
self.assertTrue(touch)
self.assertEqual(counter, 1)
|
|
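The unpack format strings in the test above are dense: 'x' skips a byte, counts like '65s' take fixed-width byte strings, '%is' sizes a variable field at runtime, and '>? I' reads a big-endian bool plus uint32. A self-contained sketch of the same struct idioms on made-up data:

import struct

# Pack one padding byte, a fixed 5-byte field, a length byte, then a
# variable-length field, mirroring the key-handle layout in the test.
handle = b'key!'
blob = struct.pack('x 5s B %is' % len(handle), b'hello', len(handle), handle)

fixed, n, var = struct.unpack('x 5s B %is' % (len(blob) - (1 + 5 + 1),), blob)
assert fixed == b'hello' and n == len(var) and var == handle

# Big-endian flag and counter, as in the signing response ('>? I').
flag, counter = struct.unpack('>? I', struct.pack('>? I', True, 1))
assert flag is True and counter == 1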
3ceddeed21abe277224750764eaf10f6c8f49a60
|
py/statemachines/simple_state_machine_script_test.py
|
py/statemachines/simple_state_machine_script_test.py
|
#----------------------------------------------------------------------------------------
# BEGIN: READ_HEXAPOD_CURRENT_POSE
# TEMPLATE: ReadTransformState
#
smach.StateMachine.add('READ_HEXAPOD_CURRENT_POSE', TFListenerState('ur10_1/base', 'hexapod_1/top', 'hexapod_current_pose'),
transitions={'succeeded':'MOVE_ABOVE_HEXAPOD_1'},
remapping={'hexapod_current_pose':'hexapod_current_pose'})
# END: READ_HEXAPOD_CURRENT_POSE
#----------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------
# BEGIN: MOVE_ABOVE_HEXAPOD_1
# TEMPLATE: CartTrapVelActionState
#
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_position_offset = np.asarray([0.0, 0.0, -0.2])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_rotation_offset = np.asarray([0.0, 0.0, 0.0])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_desired_velocity = 0.1
smach.StateMachine.add('MOVE_ABOVE_HEXAPOD_1',
smach_ros.SimpleActionState('/ur10_1/cart_trap_vel_action_server', robot_module.msg.CartTrapVelAction,
goal_cb = cart_trap_vel_goal_cb,
input_keys=['cart_trap_vel_pose_input',
'cart_trap_vel_position_offset_input',
'cart_trap_vel_rotation_offset_input',
'cart_trap_vel_desired_velocity_input']),
transitions={'succeeded':'OPEN_TOOL_EXCHANGE_1'},
remapping={'cart_trap_vel_pose_input':'hexapod_current_pose',
'cart_trap_vel_position_offset_input':'MOVE_ABOVE_HEXAPOD_1_position_offset',
'cart_trap_vel_rotation_offset_input':'MOVE_ABOVE_HEXAPOD_1_rotation_offset',
'cart_trap_vel_desired_velocity_input':'MOVE_ABOVE_HEXAPOD_1_desired_velocity'})
# END: MOVE_ABOVE_HEXAPOD_1
#----------------------------------------------------------------------------------------
|
Add basic working prototype of SMACH/Jinja2 template-based code generation.
|
Add basic working prototype of SMACH/Jinja2 template-based code generation.
|
Python
|
bsd-3-clause
|
ReconCell/smacha,ReconCell/smacha
|
Add basic working prototype of SMACH/Jinja2 template-based code generation.
|
#----------------------------------------------------------------------------------------
# BEGIN: READ_HEXAPOD_CURRENT_POSE
# TEMPLATE: ReadTransformState
#
smach.StateMachine.add('READ_HEXAPOD_CURRENT_POSE', TFListenerState('ur10_1/base', 'hexapod_1/top', 'hexapod_current_pose'),
transitions={'succeeded':'MOVE_ABOVE_HEXAPOD_1'},
remapping={'hexapod_current_pose':'hexapod_current_pose'})
# END: READ_HEXAPOD_CURRENT_POSE
#----------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------
# BEGIN: MOVE_ABOVE_HEXAPOD_1
# TEMPLATE: CartTrapVelActionState
#
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_position_offset = np.asarray([0.0, 0.0, -0.2])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_rotation_offset = np.asarray([0.0, 0.0, 0.0])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_desired_velocity = 0.1
smach.StateMachine.add('MOVE_ABOVE_HEXAPOD_1',
smach_ros.SimpleActionState('/ur10_1/cart_trap_vel_action_server', robot_module.msg.CartTrapVelAction,
goal_cb = cart_trap_vel_goal_cb,
input_keys=['cart_trap_vel_pose_input',
'cart_trap_vel_position_offset_input',
'cart_trap_vel_rotation_offset_input',
'cart_trap_vel_desired_velocity_input']),
transitions={'succeeded':'OPEN_TOOL_EXCHANGE_1'},
remapping={'cart_trap_vel_pose_input':'hexapod_current_pose',
'cart_trap_vel_position_offset_input':'MOVE_ABOVE_HEXAPOD_1_position_offset',
'cart_trap_vel_rotation_offset_input':'MOVE_ABOVE_HEXAPOD_1_rotation_offset',
'cart_trap_vel_desired_velocity_input':'MOVE_ABOVE_HEXAPOD_1_desired_velocity'})
# END: MOVE_ABOVE_HEXAPOD_1
#----------------------------------------------------------------------------------------
|
<commit_before><commit_msg>Add basic working prototype of SMACH/Jinja2 template-based code generation.<commit_after>
|
#----------------------------------------------------------------------------------------
# BEGIN: READ_HEXAPOD_CURRENT_POSE
# TEMPLATE: ReadTransformState
#
smach.StateMachine.add('READ_HEXAPOD_CURRENT_POSE', TFListenerState('ur10_1/base', 'hexapod_1/top', 'hexapod_current_pose'),
transitions={'succeeded':'MOVE_ABOVE_HEXAPOD_1'},
remapping={'hexapod_current_pose':'hexapod_current_pose'})
# END: READ_HEXAPOD_CURRENT_POSE
#----------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------
# BEGIN: MOVE_ABOVE_HEXAPOD_1
# TEMPLATE: CartTrapVelActionState
#
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_position_offset = np.asarray([0.0, 0.0, -0.2])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_rotation_offset = np.asarray([0.0, 0.0, 0.0])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_desired_velocity = 0.1
smach.StateMachine.add('MOVE_ABOVE_HEXAPOD_1',
smach_ros.SimpleActionState('/ur10_1/cart_trap_vel_action_server', robot_module.msg.CartTrapVelAction,
goal_cb = cart_trap_vel_goal_cb,
input_keys=['cart_trap_vel_pose_input',
'cart_trap_vel_position_offset_input',
'cart_trap_vel_rotation_offset_input',
'cart_trap_vel_desired_velocity_input']),
transitions={'succeeded':'OPEN_TOOL_EXCHANGE_1'},
remapping={'cart_trap_vel_pose_input':'hexapod_current_pose',
'cart_trap_vel_position_offset_input':'MOVE_ABOVE_HEXAPOD_1_position_offset',
'cart_trap_vel_rotation_offset_input':'MOVE_ABOVE_HEXAPOD_1_rotation_offset',
'cart_trap_vel_desired_velocity_input':'MOVE_ABOVE_HEXAPOD_1_desired_velocity'})
# END: MOVE_ABOVE_HEXAPOD_1
#----------------------------------------------------------------------------------------
|
Add basic working prototype of SMACH/Jinja2 template-based code generation.#----------------------------------------------------------------------------------------
# BEGIN: READ_HEXAPOD_CURRENT_POSE
# TEMPLATE: ReadTransformState
#
smach.StateMachine.add('READ_HEXAPOD_CURRENT_POSE', TFListenerState('ur10_1/base', 'hexapod_1/top', 'hexapod_current_pose'),
transitions={'succeeded':'MOVE_ABOVE_HEXAPOD_1'},
remapping={'hexapod_current_pose':'hexapod_current_pose'})
# END: READ_HEXAPOD_CURRENT_POSE
#----------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------
# BEGIN: MOVE_ABOVE_HEXAPOD_1
# TEMPLATE: CartTrapVelActionState
#
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_position_offset = np.asarray([0.0, 0.0, -0.2])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_rotation_offset = np.asarray([0.0, 0.0, 0.0])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_desired_velocity = 0.1
smach.StateMachine.add('MOVE_ABOVE_HEXAPOD_1',
smach_ros.SimpleActionState('/ur10_1/cart_trap_vel_action_server', robot_module.msg.CartTrapVelAction,
goal_cb = cart_trap_vel_goal_cb,
input_keys=['cart_trap_vel_pose_input',
'cart_trap_vel_position_offset_input',
'cart_trap_vel_rotation_offset_input',
'cart_trap_vel_desired_velocity_input']),
transitions={'succeeded':'OPEN_TOOL_EXCHANGE_1'},
remapping={'cart_trap_vel_pose_input':'hexapod_current_pose',
'cart_trap_vel_position_offset_input':'MOVE_ABOVE_HEXAPOD_1_position_offset',
'cart_trap_vel_rotation_offset_input':'MOVE_ABOVE_HEXAPOD_1_rotation_offset',
'cart_trap_vel_desired_velocity_input':'MOVE_ABOVE_HEXAPOD_1_desired_velocity'})
# END: MOVE_ABOVE_HEXAPOD_1
#----------------------------------------------------------------------------------------
|
<commit_before><commit_msg>Add basic working prototype of SMACH/Jinja2 template-based code generation.<commit_after>#----------------------------------------------------------------------------------------
# BEGIN: READ_HEXAPOD_CURRENT_POSE
# TEMPLATE: ReadTransformState
#
smach.StateMachine.add('READ_HEXAPOD_CURRENT_POSE', TFListenerState('ur10_1/base', 'hexapod_1/top', 'hexapod_current_pose'),
transitions={'succeeded':'MOVE_ABOVE_HEXAPOD_1'},
remapping={'hexapod_current_pose':'hexapod_current_pose'})
# END: READ_HEXAPOD_CURRENT_POSE
#----------------------------------------------------------------------------------------
#----------------------------------------------------------------------------------------
# BEGIN: MOVE_ABOVE_HEXAPOD_1
# TEMPLATE: CartTrapVelActionState
#
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_position_offset = np.asarray([0.0, 0.0, -0.2])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_rotation_offset = np.asarray([0.0, 0.0, 0.0])
sm_sub.userdata.MOVE_ABOVE_HEXAPOD_1_desired_velocity = 0.1
smach.StateMachine.add('MOVE_ABOVE_HEXAPOD_1',
smach_ros.SimpleActionState('/ur10_1/cart_trap_vel_action_server', robot_module.msg.CartTrapVelAction,
goal_cb = cart_trap_vel_goal_cb,
input_keys=['cart_trap_vel_pose_input',
'cart_trap_vel_position_offset_input',
'cart_trap_vel_rotation_offset_input',
'cart_trap_vel_desired_velocity_input']),
transitions={'succeeded':'OPEN_TOOL_EXCHANGE_1'},
remapping={'cart_trap_vel_pose_input':'hexapod_current_pose',
'cart_trap_vel_position_offset_input':'MOVE_ABOVE_HEXAPOD_1_position_offset',
'cart_trap_vel_rotation_offset_input':'MOVE_ABOVE_HEXAPOD_1_rotation_offset',
'cart_trap_vel_desired_velocity_input':'MOVE_ABOVE_HEXAPOD_1_desired_velocity'})
# END: MOVE_ABOVE_HEXAPOD_1
#----------------------------------------------------------------------------------------
|
|
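The generated block above assumes an enclosing sm_sub container plus project-specific states and action servers. For orientation, a minimal generic SMACH machine with the same add/transitions shape, using plain smach without any ROS action dependencies; the state and outcome names are illustrative:

import smach

class Step(smach.State):
    def __init__(self):
        smach.State.__init__(self, outcomes=['succeeded'])

    def execute(self, userdata):
        return 'succeeded'

sm = smach.StateMachine(outcomes=['done'])
with sm:
    smach.StateMachine.add('FIRST', Step(),
                           transitions={'succeeded': 'SECOND'})
    smach.StateMachine.add('SECOND', Step(),
                           transitions={'succeeded': 'done'})

outcome = sm.execute()  # FIRST -> SECOND -> 'done'
assert outcome == 'done'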
97a7fdfeff590dab0d38209d7cdb5657feb97635
|
examples/hv_grid.py
|
examples/hv_grid.py
|
from tools.pypsa_io import oedb_session, get_pq_sets,\
get_timerange, import_components, import_pq_sets, create_powerflow_problem,\
add_coordinates, plot_line_loading
from egoio.db_tables.calc_ego_hv_powerflow import Bus, Line, Generator, Load, \
Transformer, TempResolution, GeneratorPqSet, LoadPqSet
session = oedb_session()
scenario = 'Status Quo'
# define relevant columns of the generator table
pq_set_cols = ['temp_id', 'p_set']
# choose temp_id
temp_id_set = 1
start_h = 1
end_h = 2
# example call of pq-set retrieval
gen_pq_set = get_pq_sets(session, GeneratorPqSet, scenario,
index_col='generator_id', columns=pq_set_cols)
load_pq_set = get_pq_sets(session, LoadPqSet, scenario, index_col='load_id',
columns=pq_set_cols)
# define investigated time range
timerange = get_timerange(session, temp_id_set, TempResolution, start_h, end_h)
# define relevant tables
tables = [Bus, Line, Generator, Load, Transformer]
# get components from database tables
components = import_components(tables, session, scenario)
# create PyPSA powerflow problem
network, snapshots = create_powerflow_problem(timerange, components)
# import pq-set tables to pypsa network
pq_object = [GeneratorPqSet, LoadPqSet]
network = import_pq_sets(session,
network,
pq_object,
timerange,
scenario)
# add coordinates to network nodes and make ready for map plotting
network = add_coordinates(network)
# start powerflow calculations
network.pf(snapshots)
# make a line loading plot
plot_line_loading(network, output='show')
# close session
session.close()
|
Add example for hv grid
|
Add example for hv grid
|
Python
|
agpl-3.0
|
openego/ego.powerflow
|
Add example for hv grid
|
from tools.pypsa_io import oedb_session, get_pq_sets,\
get_timerange, import_components, import_pq_sets, create_powerflow_problem,\
add_coordinates, plot_line_loading
from egoio.db_tables.calc_ego_hv_powerflow import Bus, Line, Generator, Load, \
Transformer, TempResolution, GeneratorPqSet, LoadPqSet
session = oedb_session()
scenario = 'Status Quo'
# define relevant columns of the generator table
pq_set_cols = ['temp_id', 'p_set']
# choose temp_id
temp_id_set = 1
start_h = 1
end_h = 2
# example call of pq-set retrieval
gen_pq_set = get_pq_sets(session, GeneratorPqSet, scenario,
index_col='generator_id', columns=pq_set_cols)
load_pq_set = get_pq_sets(session, LoadPqSet, scenario, index_col='load_id',
columns=pq_set_cols)
# define investigated time range
timerange = get_timerange(session, temp_id_set, TempResolution, start_h, end_h)
# define relevant tables
tables = [Bus, Line, Generator, Load, Transformer]
# get components from database tables
components = import_components(tables, session, scenario)
# create PyPSA powerflow problem
network, snapshots = create_powerflow_problem(timerange, components)
# import pq-set tables to pypsa network
pq_object = [GeneratorPqSet, LoadPqSet]
network = import_pq_sets(session,
network,
pq_object,
timerange,
scenario)
# add coordinates to network nodes and make ready for map plotting
network = add_coordinates(network)
# start powerflow calculations
network.pf(snapshots)
# make a line loading plot
plot_line_loading(network, output='show')
# close session
session.close()
|
<commit_before><commit_msg>Add example for hv grid<commit_after>
|
from tools.pypsa_io import oedb_session, get_pq_sets,\
get_timerange, import_components, import_pq_sets, create_powerflow_problem,\
add_coordinates, plot_line_loading
from egoio.db_tables.calc_ego_hv_powerflow import Bus, Line, Generator, Load, \
Transformer, TempResolution, GeneratorPqSet, LoadPqSet
session = oedb_session()
scenario = 'Status Quo'
# define relevant columns of the generator table
pq_set_cols = ['temp_id', 'p_set']
# choose temp_id
temp_id_set = 1
start_h = 1
end_h = 2
# example call of pq-set retrieval
gen_pq_set = get_pq_sets(session, GeneratorPqSet, scenario,
index_col='generator_id', columns=pq_set_cols)
load_pq_set = get_pq_sets(session, LoadPqSet, scenario, index_col='load_id',
columns=pq_set_cols)
# define investigated time range
timerange = get_timerange(session, temp_id_set, TempResolution, start_h, end_h)
# define relevant tables
tables = [Bus, Line, Generator, Load, Transformer]
# get components from database tables
components = import_components(tables, session, scenario)
# create PyPSA powerflow problem
network, snapshots = create_powerflow_problem(timerange, components)
# import pq-set tables to pypsa network
pq_object = [GeneratorPqSet, LoadPqSet]
network = import_pq_sets(session,
network,
pq_object,
timerange,
scenario)
# add coordinates to network nodes and make ready for map plotting
network = add_coordinates(network)
# start powerflow calculations
network.pf(snapshots)
# make a line loading plot
plot_line_loading(network, output='show')
# close session
session.close()
|
Add example for hv gridfrom tools.pypsa_io import oedb_session, get_pq_sets,\
get_timerange, import_components, import_pq_sets, create_powerflow_problem,\
add_coordinates, plot_line_loading
from egoio.db_tables.calc_ego_hv_powerflow import Bus, Line, Generator, Load, \
Transformer, TempResolution, GeneratorPqSet, LoadPqSet
session = oedb_session()
scenario = 'Status Quo'
# define relevant columns of the generator table
pq_set_cols = ['temp_id', 'p_set']
# choose temp_id
temp_id_set = 1
start_h = 1
end_h = 2
# example call of pq-set retrieval
gen_pq_set = get_pq_sets(session, GeneratorPqSet, scenario,
index_col='generator_id', columns=pq_set_cols)
load_pq_set = get_pq_sets(session, LoadPqSet, scenario, index_col='load_id',
columns=pq_set_cols)
# define investigated time range
timerange = get_timerange(session, temp_id_set, TempResolution, start_h, end_h)
# define relevant tables
tables = [Bus, Line, Generator, Load, Transformer]
# get components from database tables
components = import_components(tables, session, scenario)
# create PyPSA powerflow problem
network, snapshots = create_powerflow_problem(timerange, components)
# import pq-set tables to pypsa network
pq_object = [GeneratorPqSet, LoadPqSet]
network = import_pq_sets(session,
network,
pq_object,
timerange,
scenario)
# add coordinates to network nodes and make ready for map plotting
network = add_coordinates(network)
# start powerflow calculations
network.pf(snapshots)
# make a line loading plot
plot_line_loading(network, output='show')
# close session
session.close()
|
<commit_before><commit_msg>Add example for hv grid<commit_after>from tools.pypsa_io import oedb_session, get_pq_sets,\
get_timerange, import_components, import_pq_sets, create_powerflow_problem,\
add_coordinates, plot_line_loading
from egoio.db_tables.calc_ego_hv_powerflow import Bus, Line, Generator, Load, \
Transformer, TempResolution, GeneratorPqSet, LoadPqSet
session = oedb_session()
scenario = 'Status Quo'
# define relevant columns of the generator table
pq_set_cols = ['temp_id', 'p_set']
# choose temp_id
temp_id_set = 1
start_h = 1
end_h = 2
# example call of pq-set retrieval
gen_pq_set = get_pq_sets(session, GeneratorPqSet, scenario,
index_col='generator_id', columns=pq_set_cols)
load_pq_set = get_pq_sets(session, LoadPqSet, scenario, index_col='load_id',
columns=pq_set_cols)
# define investigated time range
timerange = get_timerange(session, temp_id_set, TempResolution, start_h, end_h)
# define relevant tables
tables = [Bus, Line, Generator, Load, Transformer]
# get components from database tables
components = import_components(tables, session, scenario)
# create PyPSA powerflow problem
network, snapshots = create_powerflow_problem(timerange, components)
# import pq-set tables to pypsa network
pq_object = [GeneratorPqSet, LoadPqSet]
network = import_pq_sets(session,
network,
pq_object,
timerange,
scenario)
# add coordinates to network nodes and make ready for map plotting
network = add_coordinates(network)
# start powerflow calculations
network.pf(snapshots)
# make a line loading plot
plot_line_loading(network, output='show')
# close session
session.close()
|
|
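The example above depends on the project's oedb helpers and a database session. For reference, a free-standing sketch of the underlying PyPSA workflow: build a tiny network by hand and run a power flow. Component names and impedance values are illustrative:

import pypsa

network = pypsa.Network()
network.add('Bus', 'bus0', v_nom=110.0)
network.add('Bus', 'bus1', v_nom=110.0)
network.add('Line', 'line01', bus0='bus0', bus1='bus1', x=0.1, r=0.01)
network.add('Generator', 'gen0', bus='bus0', control='Slack')
network.add('Load', 'load1', bus='bus1', p_set=100.0)

network.pf()  # non-linear power flow on the default snapshot
print(network.lines_t.p0)  # active power entering the line at bus0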
5923675c35a21d442860b58e04b8f3c5ba749dda
|
common/djangoapps/student/management/commands/set_superuser.py
|
common/djangoapps/student/management/commands/set_superuser.py
|
"""Management command to grant or revoke superuser access for one or more users"""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
"""Management command to grant or revoke superuser access for one or more users"""
option_list = BaseCommand.option_list + (
make_option('--unset',
action='store_true',
dest='unset',
default=False,
help='Set is_superuser to False instead of True'),
)
args = '<user|email> [user|email ...]>'
help = """
This command will set is_superuser to true for one or more users.
    Users are looked up by username or email address; usernames are
    assumed not to look like email addresses.
"""
def handle(self, *args, **options):
if len(args) < 1:
raise CommandError('Usage is set_superuser {0}'.format(self.args))
for user in args:
try:
if '@' in user:
userobj = User.objects.get(email=user)
else:
userobj = User.objects.get(username=user)
if options['unset']:
userobj.is_superuser = False
else:
userobj.is_superuser = True
userobj.save()
except Exception as err: # pylint: disable=broad-except
print "Error modifying user with identifier {}: {}: {}".format(user, type(err).__name__, err.message)
print 'Success!'
|
Add management command for granting superuser access
|
Add management command for granting superuser access
|
Python
|
agpl-3.0
|
jamiefolsom/edx-platform,alexthered/kienhoc-platform,Ayub-Khan/edx-platform,philanthropy-u/edx-platform,gsehub/edx-platform,devs1991/test_edx_docmode,don-github/edx-platform,kursitet/edx-platform,synergeticsedx/deployment-wipro,procangroup/edx-platform,alexthered/kienhoc-platform,IndonesiaX/edx-platform,ZLLab-Mooc/edx-platform,xingyepei/edx-platform,zofuthan/edx-platform,msegado/edx-platform,mahendra-r/edx-platform,jzoldak/edx-platform,amir-qayyum-khan/edx-platform,deepsrijit1105/edx-platform,CourseTalk/edx-platform,edx-solutions/edx-platform,Softmotions/edx-platform,BehavioralInsightsTeam/edx-platform,CredoReference/edx-platform,Edraak/edraak-platform,doismellburning/edx-platform,pabloborrego93/edx-platform,ESOedX/edx-platform,a-parhom/edx-platform,amir-qayyum-khan/edx-platform,Endika/edx-platform,edx-solutions/edx-platform,hastexo/edx-platform,MakeHer/edx-platform,angelapper/edx-platform,chrisndodge/edx-platform,gymnasium/edx-platform,zhenzhai/edx-platform,jjmiranda/edx-platform,adoosii/edx-platform,bigdatauniversity/edx-platform,fly19890211/edx-platform,louyihua/edx-platform,doganov/edx-platform,doganov/edx-platform,proversity-org/edx-platform,louyihua/edx-platform,mbareta/edx-platform-ft,MakeHer/edx-platform,chrisndodge/edx-platform,cecep-edu/edx-platform,ahmedaljazzar/edx-platform,ovnicraft/edx-platform,hastexo/edx-platform,CredoReference/edx-platform,Edraak/edx-platform,mbareta/edx-platform-ft,jamiefolsom/edx-platform,zhenzhai/edx-platform,jolyonb/edx-platform,nttks/edx-platform,don-github/edx-platform,pabloborrego93/edx-platform,miptliot/edx-platform,doismellburning/edx-platform,tanmaykm/edx-platform,deepsrijit1105/edx-platform,procangroup/edx-platform,rismalrv/edx-platform,vikas1885/test1,playm2mboy/edx-platform,iivic/BoiseStateX,ubc/edx-platform,teltek/edx-platform,halvertoluke/edx-platform,simbs/edx-platform,doismellburning/edx-platform,raccoongang/edx-platform,naresh21/synergetics-edx-platform,angelapper/edx-platform,romain-li/edx-platform,Ayub-Khan/edx-platform,zofuthan/edx-platform,JioEducation/edx-platform,xingyepei/edx-platform,devs1991/test_edx_docmode,inares/edx-platform,zhenzhai/edx-platform,defance/edx-platform,kmoocdev2/edx-platform,devs1991/test_edx_docmode,marcore/edx-platform,kxliugang/edx-platform,miptliot/edx-platform,simbs/edx-platform,angelapper/edx-platform,4eek/edx-platform,arbrandes/edx-platform,halvertoluke/edx-platform,EDUlib/edx-platform,Edraak/circleci-edx-platform,hastexo/edx-platform,ahmadiga/min_edx,UOMx/edx-platform,zubair-arbi/edx-platform,chrisndodge/edx-platform,gymnasium/edx-platform,bigdatauniversity/edx-platform,Livit/Livit.Learn.EdX,xinjiguaike/edx-platform,JCBarahona/edX,msegado/edx-platform,waheedahmed/edx-platform,eduNEXT/edunext-platform,mahendra-r/edx-platform,MakeHer/edx-platform,jzoldak/edx-platform,ubc/edx-platform,bigdatauniversity/edx-platform,ampax/edx-platform,xingyepei/edx-platform,iivic/BoiseStateX,ferabra/edx-platform,fintech-circle/edx-platform,xingyepei/edx-platform,etzhou/edx-platform,itsjeyd/edx-platform,edry/edx-platform,longmen21/edx-platform,stvstnfrd/edx-platform,solashirai/edx-platform,cpennington/edx-platform,xingyepei/edx-platform,etzhou/edx-platform,vikas1885/test1,Stanford-Online/edx-platform,CredoReference/edx-platform,Edraak/edraak-platform,jbzdak/edx-platform,ahmedaljazzar/edx-platform,eduNEXT/edunext-platform,doganov/edx-platform,solashirai/edx-platform,adoosii/edx-platform,a-parhom/edx-platform,eduNEXT/edunext-platform,shurihell/testasia,philanthropy-u/edx-platform,kmoocdev2/edx-platform,nttks/edx-platform,inares/edx-platform,alu042/edx-platform,iivic/BoiseStateX,fly19890211/edx-platform,Stanford-Online/edx-platform,analyseuc3m/ANALYSE-v1,alu042/edx-platform,rismalrv/edx-platform,lduarte1991/edx-platform,romain-li/edx-platform,inares/edx-platform,CourseTalk/edx-platform,arbrandes/edx-platform,rismalrv/edx-platform,itsjeyd/edx-platform,IndonesiaX/edx-platform,Ayub-Khan/edx-platform,Endika/edx-platform,jzoldak/edx-platform,longmen21/edx-platform,IONISx/edx-platform,Edraak/edx-platform,xinjiguaike/edx-platform,shurihell/testasia,a-parhom/edx-platform,Livit/Livit.Learn.EdX,Edraak/circleci-edx-platform,prarthitm/edxplatform,kmoocdev2/edx-platform,antoviaque/edx-platform,alexthered/kienhoc-platform,analyseuc3m/ANALYSE-v1,edry/edx-platform,ZLLab-Mooc/edx-platform,MakeHer/edx-platform,JCBarahona/edX,kxliugang/edx-platform,EDUlib/edx-platform,gsehub/edx-platform,marcore/edx-platform,edx/edx-platform,wwj718/edx-platform,devs1991/test_edx_docmode,Stanford-Online/edx-platform,teltek/edx-platform,jolyonb/edx-platform,cognitiveclass/edx-platform,longmen21/edx-platform,jolyonb/edx-platform,Lektorium-LLC/edx-platform,halvertoluke/edx-platform,xinjiguaike/edx-platform,angelapper/edx-platform,appsembler/edx-platform,prarthitm/edxplatform,ESOedX/edx-platform,pepeportela/edx-platform,JioEducation/edx-platform,hamzehd/edx-platform,antoviaque/edx-platform,jbzdak/edx-platform,simbs/edx-platform,jbzdak/edx-platform,Edraak/edx-platform,kursitet/edx-platform,mbareta/edx-platform-ft,jamiefolsom/edx-platform,zhenzhai/edx-platform,jolyonb/edx-platform,nttks/edx-platform,don-github/edx-platform,pabloborrego93/edx-platform,miptliot/edx-platform,doismellburning/edx-platform,tanmaykm/edx-platform,deepsrijit1105/edx-platform,procangroup/edx-platform,rismalrv/edx-platform,vikas1885/test1,playm2mboy/edx-platform,iivic/BoiseStateX,ubc/edx-platform,teltek/edx-platform,halvertoluke/edx-platform,simbs/edx-platform,doismellburning/edx-platform,raccoongang/edx-platform,naresh21/synergetics-edx-platform,angelapper/edx-platform,romain-li/edx-platform,Ayub-Khan/edx-platform,zofuthan/edx-platform,JioEducation/edx-platform,xingyepei/edx-platform,devs1991/test_edx_docmode,inares/edx-platform,zhenzhai/edx-platform,defance/edx-platform,kmoocdev2/edx-platform,devs1991/test_edx_docmode,marcore/edx-platform,kxliugang/edx-platform,miptliot/edx-platform,simbs/edx-platform,angelapper/edx-platform,4eek/edx-platform,arbrandes/edx-platform,halvertoluke/edx-platform,EDUlib/edx-platform,Edraak/circleci-edx-platform,hastexo/edx-platform,ahmadiga/min_edx,UOMx/edx-platform,zubair-arbi/edx-platform,chrisndodge/edx-platform,gymnasium/edx-platform,bigdatauniversity/edx-platform,Livit/Livit.Learn.EdX,xinjiguaike/edx-platform,JCBarahona/edX,msegado/edx-platform,waheedahmed/edx-platform,eduNEXT/edunext-platform,mahendra-r/edx-platform,MakeHer/edx-platform,jzoldak/edx-platform,ubc/edx-platform,bigdatauniversity/edx-platform,ampax/edx-platform,xingyepei/edx-platform,iivic/BoiseStateX,ferabra/edx-platform,fintech-circle/edx-platform,xingyepei/edx-platform,etzhou/edx-platform,itsjeyd/edx-platform,edry/edx-platform,longmen21/edx-platform,stvstnfrd/edx-platform,solashirai/edx-platform,cpennington/edx-platform,xingyepei/edx-platform,etzhou/edx-platform,vikas1885/test1,Stanford-Online/edx-platform,CredoReference/edx-platform,Edraak/edraak-platform,jbzdak/edx-platform,ahmedaljazzar/edx-platform,eduNEXT/edunext-platform,doganov/edx-platform,solashirai/edx-platform,adoosii/edx-platform,a-parhom/edx-platform,eduNEXT/edunext-platform,shurihell/testasia,philanthropy-u/edx-platform,kmoocdev2/edx-platform,nttks/edx-platform,inares/edx-platform,alu042/edx-platform,iivic/BoiseStateX,fly19890211/edx-platform,Stanford-Online/edx-platform,analyseuc3m/ANALYSE-v1,alu042/edx-platform,rismalrv/edx-platform,lduarte1991/edx-platform,romain-li/edx-platform,inares/edx-platform,CourseTalk/edx-platform,arbrandes/edx-platform,rismalrv/edx-platform,itsjeyd/edx-platform,IndonesiaX/edx-platform,Ayub-Khan/edx-platform,Endika/edx-platform,jzoldak/edx-platform,longmen21/edx-platform,IONISx/edx-platform,Edraak/edx-platform,xinjiguaike/edx-platform,shurihell/testasia,a-parhom/edx-platform,Livit/Livit.Learn.EdX,Edraak/circleci-edx-platform,prarthitm/edxplatform,kmoocdev2/edx-platform,antoviaque/edx-platform,alexthered/kienhoc-platform,analyseuc3m/ANALYSE-v1,edry/edx-platform,ZLLab-Mooc/edx-platform,MakeHer/edx-platform,JCBarahona/edX,kxliugang/edx-platform,EDUlib/edx-platform,gsehub/edx-platform,marcore/edx-platform,edx/edx-platform,wwj718/edx-platform,devs1991/test_edx_docmode,Stanford-Online/edx-platform,teltek/edx-platform,jolyonb/edx-platform,cognitiveclass/edx-platform,longmen21/edx-platform,jolyonb/edx-platform,Lektorium-LLC/edx-platform,halvertoluke/edx-platform,xinjiguaike/edx-platform,angelapper/edx-platform,appsembler/edx-platform,prarthitm/edxplatform,ESOedX/edx-platform,pepeportela/edx-platform,JioEducation/edx-platform,mitocw/edx-platform,Softmotions/edx-platform,zubair-arbi/edx-platform,mitocw/edx-platform,caesar2164/edx-platform,teltek/edx-platform,amir-qayyum-khan/edx-platform,lduarte1991/edx-platform,edry/edx-platform,mcgachey/edx-platform,franosincic/edx-platform,proversity-org/edx-platform,ahmadiga/min_edx,fly19890211/edx-platform,kursitet/edx-platform,louyihua/edx-platform,inares/edx-platform,jbzdak/edx-platform,chrisndodge/edx-platform,rismalrv/edx-platform,marcore/edx-platform,RPI-OPENEDX/edx-platform,alexthered/kienhoc-platform,Ayub-Khan/edx-platform,mahendra-r/edx-platform,jamiefolsom/edx-platform,wwj718/edx-platform,franosincic/edx-platform,mcgachey/edx-platform,devs1991/test_edx_docmode,franosincic/edx-platform,miptliot/edx-platform,mbareta/edx-platform-ft,JCBarahona/edX,cognitiveclass/edx-platform,tanmaykm/edx-platform,jolyonb/edx-platform,fly19890211/edx-platform,doismellburning/edx-platform,jbzdak/edx-platform,bigdatauniversity/edx-platform,edx/edx-platform,CourseTalk/edx-platform,iivic/BoiseStateX,philanthropy-u/edx-platform,waheedahmed/edx-platform,hamzehd/edx-platform,ZLLab-Mooc/edx-platform,halvertoluke/edx-platform,waheedahmed/edx-platform,devs1991/test_edx_docmode,synergeticsedx/deployment-wipro,10clouds/edx-platform,lduarte1991/edx-platform,IONISx/edx-platform,itsjeyd/edx-platform,zubair-arbi/edx-platform,raccoongang/edx-platform,halvertoluke/edx-platform,pomegranited/edx-platform,Softmotions/edx-platform,fly19890211/edx-platform,edx/edx-platform,alu042/edx-platform,Livit/Livit.Learn.EdX,kxliugang/edx-platform,solashirai/edx-platform,miptliot/edx-platform,itsjeyd/edx-platform,10clouds/edx-platform,kxliugang/edx-platform,jjmiranda/edx-platform,ovnicraft/edx-platform,IndonesiaX/edx-platform,hamzehd/edx-platform,shabab12/edx-platform,Lektorium-LLC/edx-platform,EDUlib/edx-platform,ampax/edx-platform,simbs/edx-platform,longmen21/edx-platform,antoviaque/edx-platform,kxliugang/edx-platform,nttks/edx-platform,procangroup/edx-platform,eduNEXT/edx-platform,doismellburning/edx-platform,don-github/edx-platform,pomegranited/edx-platform,doganov/edx-platform,TeachAtTUM/edx-platform,kursitet/edx-platform,eduNEXT/edx-platform,edx/edx-platform,shurihell/testasia,procangroup/edx-platform,Stanford-Online/edx-platform,mbareta/edx-platform-ft,appsembler/edx-platform,pomegranited/edx-platform,romain-li/edx-platform,cpennington/edx-platform,Softmotions/edx-platform,RPI-OPENEDX/edx-platform,Edraak/edraak-platform,BehavioralInsightsTeam/edx-platform,wwj718/edx-platform,tanmaykm/edx-platform,iivic/BoiseStateX,romain-li/edx-platform,ovnicraft/edx-platform,pabloborrego93/edx-platform,JCBarahona/edX,JCBarahona/edX,JioEducation/edx-platform,proversity-org/edx-platform,zubair-arbi/edx-platform,Lektorium-LLC/edx-platform,MakeHer/edx-platform,zofuthan/edx-platform,vikas1885/test1,raccoongang/edx-platform,nttks/edx-platform,jamiefolsom/edx-platform,jzoldak/edx-platform,CredoReference/edx-platform,antoviaque/edx-platform,RPI-OPENEDX/edx-platform,mitocw/edx-platform,playm2mboy/edx-platform,edx-solutions/edx-platform,adoosii/edx-platform,JioEducation/edx-platform,cecep-edu/edx-platform,longmen21/edx-platform,wwj718/edx-platform,pabloborrego93/edx-platform,mcgachey/edx-platform,ESOedX/edx-platform,mcgachey/edx-platform,pomegranited/edx-platform,louyihua/edx-platform,jjmiranda/edx-platform,zofuthan/edx-platform,Edraak/edx-platform,Edraak/circleci-edx-platform,playm2mboy/edx-platform,gymnasium/edx-platform,arbrandes/edx-platform,zhenzhai/edx-platform,ubc/edx-platform,defance/edx-platform,Endika/edx-platform,bi
gdatauniversity/edx-platform,arbrandes/edx-platform,Edraak/edraak-platform,ubc/edx-platform,xinjiguaike/edx-platform,hamzehd/edx-platform,pepeportela/edx-platform,don-github/edx-platform,UOMx/edx-platform,IndonesiaX/edx-platform,shurihell/testasia,prarthitm/edxplatform,eduNEXT/edx-platform,vikas1885/test1,analyseuc3m/ANALYSE-v1,zubair-arbi/edx-platform,IONISx/edx-platform,simbs/edx-platform,marcore/edx-platform,naresh21/synergetics-edx-platform,ahmadiga/min_edx,CredoReference/edx-platform,caesar2164/edx-platform,a-parhom/edx-platform,ferabra/edx-platform,ampax/edx-platform,UOMx/edx-platform,stvstnfrd/edx-platform,ferabra/edx-platform,xinjiguaike/edx-platform,fintech-circle/edx-platform,Endika/edx-platform,cecep-edu/edx-platform,waheedahmed/edx-platform,hamzehd/edx-platform,inares/edx-platform,shabab12/edx-platform,analyseuc3m/ANALYSE-v1,devs1991/test_edx_docmode,wwj718/edx-platform,edx-solutions/edx-platform,TeachAtTUM/edx-platform,ahmadiga/min_edx,synergeticsedx/deployment-wipro,lduarte1991/edx-platform,deepsrijit1105/edx-platform,BehavioralInsightsTeam/edx-platform,playm2mboy/edx-platform,ZLLab-Mooc/edx-platform,TeachAtTUM/edx-platform,ahmedaljazzar/edx-platform,Edraak/circleci-edx-platform,IONISx/edx-platform,tanmaykm/edx-platform,RPI-OPENEDX/edx-platform,jamiefolsom/edx-platform,edry/edx-platform,BehavioralInsightsTeam/edx-platform,4eek/edx-platform,cpennington/edx-platform,naresh21/synergetics-edx-platform,ampax/edx-platform,Edraak/edx-platform,solashirai/edx-platform,Livit/Livit.Learn.EdX,mcgachey/edx-platform,zofuthan/edx-platform
|
Add management command for granting superuser access
|
"""Management command to grant or revoke superuser access for one or more users"""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
"""Management command to grant or revoke superuser access for one or more users"""
option_list = BaseCommand.option_list + (
make_option('--unset',
action='store_true',
dest='unset',
default=False,
help='Set is_superuser to False instead of True'),
)
    args = '<user|email> [user|email ...]'
help = """
This command will set is_superuser to true for one or more users.
    Looks up users by username or email address; assumes usernames
    do not look like email addresses.
"""
def handle(self, *args, **options):
if len(args) < 1:
raise CommandError('Usage is set_superuser {0}'.format(self.args))
for user in args:
try:
if '@' in user:
userobj = User.objects.get(email=user)
else:
userobj = User.objects.get(username=user)
if options['unset']:
userobj.is_superuser = False
else:
userobj.is_superuser = True
userobj.save()
except Exception as err: # pylint: disable=broad-except
print "Error modifying user with identifier {}: {}: {}".format(user, type(err).__name__, err.message)
print 'Success!'
|
<commit_before><commit_msg>Add management command for granting superuser access<commit_after>
|
"""Management command to grant or revoke superuser access for one or more users"""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
"""Management command to grant or revoke superuser access for one or more users"""
option_list = BaseCommand.option_list + (
make_option('--unset',
action='store_true',
dest='unset',
default=False,
help='Set is_superuser to False instead of True'),
)
    args = '<user|email> [user|email ...]'
help = """
This command will set is_superuser to true for one or more users.
    Looks up users by username or email address; assumes usernames
    do not look like email addresses.
"""
def handle(self, *args, **options):
if len(args) < 1:
raise CommandError('Usage is set_superuser {0}'.format(self.args))
for user in args:
try:
if '@' in user:
userobj = User.objects.get(email=user)
else:
userobj = User.objects.get(username=user)
if options['unset']:
userobj.is_superuser = False
else:
userobj.is_superuser = True
userobj.save()
except Exception as err: # pylint: disable=broad-except
print "Error modifying user with identifier {}: {}: {}".format(user, type(err).__name__, err.message)
print 'Success!'
|
Add management command for granting superuser access"""Management command to grant or revoke superuser access for one or more users"""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
"""Management command to grant or revoke superuser access for one or more users"""
option_list = BaseCommand.option_list + (
make_option('--unset',
action='store_true',
dest='unset',
default=False,
help='Set is_superuser to False instead of True'),
)
    args = '<user|email> [user|email ...]'
help = """
This command will set is_superuser to true for one or more users.
    Looks up users by username or email address; assumes usernames
    do not look like email addresses.
"""
def handle(self, *args, **options):
if len(args) < 1:
raise CommandError('Usage is set_superuser {0}'.format(self.args))
for user in args:
try:
if '@' in user:
userobj = User.objects.get(email=user)
else:
userobj = User.objects.get(username=user)
if options['unset']:
userobj.is_superuser = False
else:
userobj.is_superuser = True
userobj.save()
except Exception as err: # pylint: disable=broad-except
print "Error modifying user with identifier {}: {}: {}".format(user, type(err).__name__, err.message)
print 'Success!'
|
<commit_before><commit_msg>Add management command for granting superuser access<commit_after>"""Management command to grant or revoke superuser access for one or more users"""
from optparse import make_option
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
"""Management command to grant or revoke superuser access for one or more users"""
option_list = BaseCommand.option_list + (
make_option('--unset',
action='store_true',
dest='unset',
default=False,
help='Set is_superuser to False instead of True'),
)
    args = '<user|email> [user|email ...]'
help = """
This command will set is_superuser to true for one or more users.
    Looks up users by username or email address; assumes usernames
    do not look like email addresses.
"""
def handle(self, *args, **options):
if len(args) < 1:
raise CommandError('Usage is set_superuser {0}'.format(self.args))
for user in args:
try:
if '@' in user:
userobj = User.objects.get(email=user)
else:
userobj = User.objects.get(username=user)
if options['unset']:
userobj.is_superuser = False
else:
userobj.is_superuser = True
userobj.save()
except Exception as err: # pylint: disable=broad-except
print "Error modifying user with identifier {}: {}: {}".format(user, type(err).__name__, err.message)
print 'Success!'
|
|
ade8079d2e455899381bba1738373f734e2d3508
|
parser.py
|
parser.py
|
from collections import namedtuple
IRCMsg = namedtuple('IRCMsg', 'prefix cmd params postfix')
def parse(line):
""" Parses line and returns a named tuple IRCMsg
with fields (prefix, cmd, params, postfix).
- prefix is the first part starting with : (colon), without the :
- cmd is the command
- params are the parameters for the command,
not including the possible postfix
- postfix is the part of the parameters starting with :, without the :
"""
if not line:
return None
prefix = ''
command = ''
params = ''
postfix = ''
    # prefix is present if the line starts with ':'
if line[0] == ':':
prefix, line = line.split(' ', 1)
# there might be more than one space between
# the possible prefix and command, so we'll strip them
command, line = line.lstrip().split(' ', 1)
    # postfix is present if the line contains ':'
index = line.find(':')
if index != -1:
params = line[:index]
postfix = line[index:]
else:
params = line
# command and params must be non-empty
if len(command) == 0 or len(params) == 0:
return None
return IRCMsg(prefix=prefix[1:], cmd=command, params=params,
postfix=postfix[1:])
|
Add module for parsing IRC messages
|
Add module for parsing IRC messages
|
Python
|
mit
|
aalien/mib
|
Add module for parsing IRC messages
|
from collections import namedtuple
IRCMsg = namedtuple('IRCMsg', 'prefix cmd params postfix')
def parse(line):
""" Parses line and returns a named tuple IRCMsg
with fields (prefix, cmd, params, postfix).
- prefix is the first part starting with : (colon), without the :
- cmd is the command
- params are the parameters for the command,
not including the possible postfix
- postfix is the part of the parameters starting with :, without the :
"""
if not line:
return None
prefix = ''
command = ''
params = ''
postfix = ''
    # prefix is present if the line starts with ':'
if line[0] == ':':
prefix, line = line.split(' ', 1)
# there might be more than one space between
# the possible prefix and command, so we'll strip them
command, line = line.lstrip().split(' ', 1)
    # postfix is present if the line contains ':'
index = line.find(':')
if index != -1:
params = line[:index]
postfix = line[index:]
else:
params = line
# command and params must be non-empty
if len(command) == 0 or len(params) == 0:
return None
return IRCMsg(prefix=prefix[1:], cmd=command, params=params,
postfix=postfix[1:])
|
<commit_before><commit_msg>Add module for parsing IRC messages<commit_after>
|
from collections import namedtuple
IRCMsg = namedtuple('IRCMsg', 'prefix cmd params postfix')
def parse(line):
""" Parses line and returns a named tuple IRCMsg
with fields (prefix, cmd, params, postfix).
- prefix is the first part starting with : (colon), without the :
- cmd is the command
- params are the parameters for the command,
not including the possible postfix
- postfix is the part of the parameters starting with :, without the :
"""
if not line:
return None
prefix = ''
command = ''
params = ''
postfix = ''
    # prefix is present if the line starts with ':'
if line[0] == ':':
prefix, line = line.split(' ', 1)
# there might be more than one space between
# the possible prefix and command, so we'll strip them
command, line = line.lstrip().split(' ', 1)
    # postfix is present if the line contains ':'
index = line.find(':')
if index != -1:
params = line[:index]
postfix = line[index:]
else:
params = line
# command and params must be non-empty
if len(command) == 0 or len(params) == 0:
return None
return IRCMsg(prefix=prefix[1:], cmd=command, params=params,
postfix=postfix[1:])
|
Add module for parsing IRC messagesfrom collections import namedtuple
IRCMsg = namedtuple('IRCMsg', 'prefix cmd params postfix')
def parse(line):
""" Parses line and returns a named tuple IRCMsg
with fields (prefix, cmd, params, postfix).
- prefix is the first part starting with : (colon), without the :
- cmd is the command
- params are the parameters for the command,
not including the possible postfix
- postfix is the part of the parameters starting with :, without the :
"""
if not line:
return None
prefix = ''
command = ''
params = ''
postfix = ''
    # prefix is present if the line starts with ':'
if line[0] == ':':
prefix, line = line.split(' ', 1)
# there might be more than one space between
# the possible prefix and command, so we'll strip them
command, line = line.lstrip().split(' ', 1)
    # postfix is present if the line contains ':'
index = line.find(':')
if index != -1:
params = line[:index]
postfix = line[index:]
else:
params = line
# command and params must be non-empty
if len(command) == 0 or len(params) == 0:
return None
return IRCMsg(prefix=prefix[1:], cmd=command, params=params,
postfix=postfix[1:])
|
<commit_before><commit_msg>Add module for parsing IRC messages<commit_after>from collections import namedtuple
IRCMsg = namedtuple('IRCMsg', 'prefix cmd params postfix')
def parse(line):
""" Parses line and returns a named tuple IRCMsg
with fields (prefix, cmd, params, postfix).
- prefix is the first part starting with : (colon), without the :
- cmd is the command
- params are the parameters for the command,
not including the possible postfix
- postfix is the part of the parameters starting with :, without the :
"""
if not line:
return None
prefix = ''
command = ''
params = ''
postfix = ''
    # prefix is present if the line starts with ':'
if line[0] == ':':
prefix, line = line.split(' ', 1)
# there might be more than one space between
# the possible prefix and command, so we'll strip them
command, line = line.lstrip().split(' ', 1)
    # postfix is present if the line contains ':'
index = line.find(':')
if index != -1:
params = line[:index]
postfix = line[index:]
else:
params = line
# command and params must be non-empty
if len(command) == 0 or len(params) == 0:
return None
return IRCMsg(prefix=prefix[1:], cmd=command, params=params,
postfix=postfix[1:])
|
|
39440a6fe065f29440e6f819dedc52f1ef47774c
|
test/models/test_software.py
|
test/models/test_software.py
|
from test.base import ApiDBTestCase
from zou.app.models.software import Software
from zou.app.utils import fields
class SoftwareTestCase(ApiDBTestCase):
def setUp(self):
super(SoftwareTestCase, self).setUp()
self.generate_data(Software, 3)
def test_get_softwares(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
def test_get_software(self):
software = self.get_first("data/softwares")
software_again = self.get("data/softwares/%s" % software["id"])
self.assertEquals(software, software_again)
self.get_404("data/softwares/%s" % fields.gen_uuid())
def test_create_software(self):
data = {
"name": "3dsMax",
"short_name": "max",
"file_extension": ".max"
}
self.software = self.post("data/softwares", data)
self.assertIsNotNone(self.software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 4)
def test_update_software(self):
software = self.get_first("data/softwares")
data = {
"name": "Maya",
"file_extension": ".ma",
}
self.put("data/softwares/%s" % software["id"], data)
software_again = self.get(
"data/softwares/%s" % software["id"])
self.assertEquals(data["name"], software_again["name"])
self.put_404("data/softwares/%s" % fields.gen_uuid(), data)
def test_delete_software(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
software = softwares[0]
self.delete("data/softwares/%s" % software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 2)
self.delete_404("data/softwares/%s" % fields.gen_uuid())
|
Add tests for software model crud routes
|
Add tests for software model crud routes
|
Python
|
agpl-3.0
|
cgwire/zou
|
Add tests for software model crud routes
|
from test.base import ApiDBTestCase
from zou.app.models.software import Software
from zou.app.utils import fields
class SoftwareTestCase(ApiDBTestCase):
def setUp(self):
super(SoftwareTestCase, self).setUp()
self.generate_data(Software, 3)
def test_get_softwares(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
def test_get_software(self):
software = self.get_first("data/softwares")
software_again = self.get("data/softwares/%s" % software["id"])
self.assertEquals(software, software_again)
self.get_404("data/softwares/%s" % fields.gen_uuid())
def test_create_software(self):
data = {
"name": "3dsMax",
"short_name": "max",
"file_extension": ".max"
}
self.software = self.post("data/softwares", data)
self.assertIsNotNone(self.software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 4)
def test_update_software(self):
software = self.get_first("data/softwares")
data = {
"name": "Maya",
"file_extension": ".ma",
}
self.put("data/softwares/%s" % software["id"], data)
software_again = self.get(
"data/softwares/%s" % software["id"])
self.assertEquals(data["name"], software_again["name"])
self.put_404("data/softwares/%s" % fields.gen_uuid(), data)
def test_delete_software(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
software = softwares[0]
self.delete("data/softwares/%s" % software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 2)
self.delete_404("data/softwares/%s" % fields.gen_uuid())
|
<commit_before><commit_msg>Add tests for software model crud routes<commit_after>
|
from test.base import ApiDBTestCase
from zou.app.models.software import Software
from zou.app.utils import fields
class SoftwareTestCase(ApiDBTestCase):
def setUp(self):
super(SoftwareTestCase, self).setUp()
self.generate_data(Software, 3)
def test_get_softwares(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
def test_get_software(self):
software = self.get_first("data/softwares")
software_again = self.get("data/softwares/%s" % software["id"])
self.assertEquals(software, software_again)
self.get_404("data/softwares/%s" % fields.gen_uuid())
def test_create_software(self):
data = {
"name": "3dsMax",
"short_name": "max",
"file_extension": ".max"
}
self.software = self.post("data/softwares", data)
self.assertIsNotNone(self.software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 4)
def test_update_software(self):
software = self.get_first("data/softwares")
data = {
"name": "Maya",
"file_extension": ".ma",
}
self.put("data/softwares/%s" % software["id"], data)
software_again = self.get(
"data/softwares/%s" % software["id"])
self.assertEquals(data["name"], software_again["name"])
self.put_404("data/softwares/%s" % fields.gen_uuid(), data)
def test_delete_software(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
software = softwares[0]
self.delete("data/softwares/%s" % software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 2)
self.delete_404("data/softwares/%s" % fields.gen_uuid())
|
Add tests for software model crud routesfrom test.base import ApiDBTestCase
from zou.app.models.software import Software
from zou.app.utils import fields
class SoftwareTestCase(ApiDBTestCase):
def setUp(self):
super(SoftwareTestCase, self).setUp()
self.generate_data(Software, 3)
def test_get_softwares(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
def test_get_software(self):
software = self.get_first("data/softwares")
software_again = self.get("data/softwares/%s" % software["id"])
self.assertEquals(software, software_again)
self.get_404("data/softwares/%s" % fields.gen_uuid())
def test_create_software(self):
data = {
"name": "3dsMax",
"short_name": "max",
"file_extension": ".max"
}
self.software = self.post("data/softwares", data)
self.assertIsNotNone(self.software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 4)
def test_update_software(self):
software = self.get_first("data/softwares")
data = {
"name": "Maya",
"file_extension": ".ma",
}
self.put("data/softwares/%s" % software["id"], data)
software_again = self.get(
"data/softwares/%s" % software["id"])
self.assertEquals(data["name"], software_again["name"])
self.put_404("data/softwares/%s" % fields.gen_uuid(), data)
def test_delete_software(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
software = softwares[0]
self.delete("data/softwares/%s" % software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 2)
self.delete_404("data/softwares/%s" % fields.gen_uuid())
|
<commit_before><commit_msg>Add tests for software model crud routes<commit_after>from test.base import ApiDBTestCase
from zou.app.models.software import Software
from zou.app.utils import fields
class SoftwareTestCase(ApiDBTestCase):
def setUp(self):
super(SoftwareTestCase, self).setUp()
self.generate_data(Software, 3)
def test_get_softwares(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
def test_get_software(self):
software = self.get_first("data/softwares")
software_again = self.get("data/softwares/%s" % software["id"])
self.assertEquals(software, software_again)
self.get_404("data/softwares/%s" % fields.gen_uuid())
def test_create_software(self):
data = {
"name": "3dsMax",
"short_name": "max",
"file_extension": ".max"
}
self.software = self.post("data/softwares", data)
self.assertIsNotNone(self.software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 4)
def test_update_software(self):
software = self.get_first("data/softwares")
data = {
"name": "Maya",
"file_extension": ".ma",
}
self.put("data/softwares/%s" % software["id"], data)
software_again = self.get(
"data/softwares/%s" % software["id"])
self.assertEquals(data["name"], software_again["name"])
self.put_404("data/softwares/%s" % fields.gen_uuid(), data)
def test_delete_software(self):
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 3)
software = softwares[0]
self.delete("data/softwares/%s" % software["id"])
softwares = self.get("data/softwares")
self.assertEquals(len(softwares), 2)
self.delete_404("data/softwares/%s" % fields.gen_uuid())
|
|
726b8d169923e31e1efbd1385e9f29f5fd8793f4
|
tests/create-task.py
|
tests/create-task.py
|
#!/usr/bin/env python
import sys, dbus, dbus.glib
#import pdb
# Get the D-Bus Session Bus
bus = dbus.SessionBus()
#Access the Tasque daemon object
obj = bus.get_object("org.gnome.Tasque",
"/org/gnome/Tasque/RemoteControl")
#Access the remote control interface
tasque = dbus.Interface(obj, "org.gnome.Tasque.RemoteControl")
for n in tasque.GetCategoryNames():
print n
taskId = tasque.CreateTask ("Tasque", "Create a task via DBus", True)
print "taskId: " + taskId
|
Move python test script to tests (part 2)
|
Move python test script to tests (part 2)
|
Python
|
mit
|
mono-soc-2012/Tasque,mono-soc-2012/Tasque,mono-soc-2012/Tasque
|
Move python test script to tests (part 2)
|
#!/usr/bin/env python
import sys, dbus, dbus.glib
#import pdb
# Get the D-Bus Session Bus
bus = dbus.SessionBus()
#Access the Tasque daemon object
obj = bus.get_object("org.gnome.Tasque",
"/org/gnome/Tasque/RemoteControl")
#Access the remote control interface
tasque = dbus.Interface(obj, "org.gnome.Tasque.RemoteControl")
for n in tasque.GetCategoryNames():
print n
taskId = tasque.CreateTask ("Tasque", "Create a task via DBus", True)
print "taskId: " + taskId
|
<commit_before><commit_msg>Move python test script to tests (part 2)<commit_after>
|
#!/usr/bin/env python
import sys, dbus, dbus.glib
#import pdb
# Get the D-Bus Session Bus
bus = dbus.SessionBus()
#Access the Tasque daemon object
obj = bus.get_object("org.gnome.Tasque",
"/org/gnome/Tasque/RemoteControl")
#Access the remote control interface
tasque = dbus.Interface(obj, "org.gnome.Tasque.RemoteControl")
for n in tasque.GetCategoryNames():
print n
taskId = tasque.CreateTask ("Tasque", "Create a task via DBus", True)
print "taskId: " + taskId
|
Move python test script to tests (part 2)#!/usr/bin/env python
import sys, dbus, dbus.glib
#import pdb
# Get the D-Bus Session Bus
bus = dbus.SessionBus()
#Access the Tasque daemon object
obj = bus.get_object("org.gnome.Tasque",
"/org/gnome/Tasque/RemoteControl")
#Access the remote control interface
tasque = dbus.Interface(obj, "org.gnome.Tasque.RemoteControl")
for n in tasque.GetCategoryNames():
print n
taskId = tasque.CreateTask ("Tasque", "Create a task via DBus", True)
print "taskId: " + taskId
|
<commit_before><commit_msg>Move python test script to tests (part 2)<commit_after>#!/usr/bin/env python
import sys, dbus, dbus.glib
#import pdb
# Get the D-Bus Session Bus
bus = dbus.SessionBus()
#Access the Tasque daemon object
obj = bus.get_object("org.gnome.Tasque",
"/org/gnome/Tasque/RemoteControl")
#Access the remote control interface
tasque = dbus.Interface(obj, "org.gnome.Tasque.RemoteControl")
for n in tasque.GetCategoryNames():
print n
taskId = tasque.CreateTask ("Tasque", "Create a task via DBus", True)
print "taskId: " + taskId
|
|
9a3d3e70c193cd0dfef51d4a0d02ebc21b7933bc
|
final/usresident.py
|
final/usresident.py
|
## DO NOT MODIFY THE IMPLEMENTATION OF THE Person CLASS ##
class Person(object):
def __init__(self, name):
#create a person with name name
self.name = name
try:
firstBlank = name.rindex(' ')
self.lastName = name[firstBlank+1:]
except:
self.lastName = name
self.age = None
def getLastName(self):
#return self's last name
return self.lastName
def setAge(self, age):
#assumes age is an int greater than 0
#sets self's age to age (in years)
self.age = age
def getAge(self):
#assumes that self's age has been set
#returns self's current age in years
if self.age == None:
raise ValueError
return self.age
def __lt__(self, other):
#return True if self's name is lexicographically less
#than other's name, and False otherwise
if self.lastName == other.lastName:
return self.name < other.name
return self.lastName < other.lastName
def __str__(self):
#return self's name
return self.name
class USResident(Person):
"""
A Person who resides in the US.
"""
def __init__(self, name, status):
"""
Initializes a Person object. A USResident object inherits
from Person and has one additional attribute:
status: a string, one of "citizen", "legal_resident", "illegal_resident"
Raises a ValueError if status is not one of those 3 strings
"""
Person.__init__(self, name)
if status == "citizen" or status == "legal_resident" or status == "illegal_resident":
self.status = status
else:
raise ValueError
def getStatus(self):
"""
Returns the status
"""
return self.status
a = USResident('Tim Beaver', 'citizen')
print(a.getStatus())
b = USResident('Tim Horton', 'non-resident')
|
Implement two methods in the USResident class
|
Implement two methods in the USResident class
|
Python
|
mit
|
Kunal57/MIT_6.00.1x
|
Implement two methods in the USResident class
|
## DO NOT MODIFY THE IMPLEMENTATION OF THE Person CLASS ##
class Person(object):
def __init__(self, name):
#create a person with name name
self.name = name
try:
firstBlank = name.rindex(' ')
self.lastName = name[firstBlank+1:]
except:
self.lastName = name
self.age = None
def getLastName(self):
#return self's last name
return self.lastName
def setAge(self, age):
#assumes age is an int greater than 0
#sets self's age to age (in years)
self.age = age
def getAge(self):
#assumes that self's age has been set
#returns self's current age in years
if self.age == None:
raise ValueError
return self.age
def __lt__(self, other):
#return True if self's name is lexicographically less
#than other's name, and False otherwise
if self.lastName == other.lastName:
return self.name < other.name
return self.lastName < other.lastName
def __str__(self):
#return self's name
return self.name
class USResident(Person):
"""
A Person who resides in the US.
"""
def __init__(self, name, status):
"""
Initializes a Person object. A USResident object inherits
from Person and has one additional attribute:
status: a string, one of "citizen", "legal_resident", "illegal_resident"
Raises a ValueError if status is not one of those 3 strings
"""
Person.__init__(self, name)
if status == "citizen" or status == "legal_resident" or status == "illegal_resident":
self.status = status
else:
raise ValueError
def getStatus(self):
"""
Returns the status
"""
return self.status
a = USResident('Tim Beaver', 'citizen')
print(a.getStatus())
b = USResident('Tim Horton', 'non-resident')
|
<commit_before><commit_msg>Implement two methods in the USResident class<commit_after>
|
## DO NOT MODIFY THE IMPLEMENTATION OF THE Person CLASS ##
class Person(object):
def __init__(self, name):
#create a person with name name
self.name = name
try:
firstBlank = name.rindex(' ')
self.lastName = name[firstBlank+1:]
except:
self.lastName = name
self.age = None
def getLastName(self):
#return self's last name
return self.lastName
def setAge(self, age):
#assumes age is an int greater than 0
#sets self's age to age (in years)
self.age = age
def getAge(self):
#assumes that self's age has been set
#returns self's current age in years
if self.age == None:
raise ValueError
return self.age
def __lt__(self, other):
#return True if self's name is lexicographically less
#than other's name, and False otherwise
if self.lastName == other.lastName:
return self.name < other.name
return self.lastName < other.lastName
def __str__(self):
#return self's name
return self.name
class USResident(Person):
"""
A Person who resides in the US.
"""
def __init__(self, name, status):
"""
Initializes a Person object. A USResident object inherits
from Person and has one additional attribute:
status: a string, one of "citizen", "legal_resident", "illegal_resident"
Raises a ValueError if status is not one of those 3 strings
"""
Person.__init__(self, name)
if status == "citizen" or status == "legal_resident" or status == "illegal_resident":
self.status = status
else:
raise ValueError
def getStatus(self):
"""
Returns the status
"""
return self.status
a = USResident('Tim Beaver', 'citizen')
print(a.getStatus())
b = USResident('Tim Horton', 'non-resident')
|
Implement two methods in the USResident class## DO NOT MODIFY THE IMPLEMENTATION OF THE Person CLASS ##
class Person(object):
def __init__(self, name):
#create a person with name name
self.name = name
try:
firstBlank = name.rindex(' ')
self.lastName = name[firstBlank+1:]
except:
self.lastName = name
self.age = None
def getLastName(self):
#return self's last name
return self.lastName
def setAge(self, age):
#assumes age is an int greater than 0
#sets self's age to age (in years)
self.age = age
def getAge(self):
#assumes that self's age has been set
#returns self's current age in years
if self.age == None:
raise ValueError
return self.age
def __lt__(self, other):
#return True if self's name is lexicographically less
#than other's name, and False otherwise
if self.lastName == other.lastName:
return self.name < other.name
return self.lastName < other.lastName
def __str__(self):
#return self's name
return self.name
class USResident(Person):
"""
A Person who resides in the US.
"""
def __init__(self, name, status):
"""
Initializes a Person object. A USResident object inherits
from Person and has one additional attribute:
status: a string, one of "citizen", "legal_resident", "illegal_resident"
Raises a ValueError if status is not one of those 3 strings
"""
Person.__init__(self, name)
if status == "citizen" or status == "legal_resident" or status == "illegal_resident":
self.status = status
else:
raise ValueError
def getStatus(self):
"""
Returns the status
"""
return self.status
a = USResident('Tim Beaver', 'citizen')
print(a.getStatus())
b = USResident('Tim Horton', 'non-resident')
|
<commit_before><commit_msg>Implement two methods in the USResident class<commit_after>## DO NOT MODIFY THE IMPLEMENTATION OF THE Person CLASS ##
class Person(object):
def __init__(self, name):
#create a person with name name
self.name = name
try:
firstBlank = name.rindex(' ')
self.lastName = name[firstBlank+1:]
except:
self.lastName = name
self.age = None
def getLastName(self):
#return self's last name
return self.lastName
def setAge(self, age):
#assumes age is an int greater than 0
#sets self's age to age (in years)
self.age = age
def getAge(self):
#assumes that self's age has been set
#returns self's current age in years
if self.age == None:
raise ValueError
return self.age
def __lt__(self, other):
#return True if self's name is lexicographically less
#than other's name, and False otherwise
if self.lastName == other.lastName:
return self.name < other.name
return self.lastName < other.lastName
def __str__(self):
#return self's name
return self.name
class USResident(Person):
"""
A Person who resides in the US.
"""
def __init__(self, name, status):
"""
Initializes a Person object. A USResident object inherits
from Person and has one additional attribute:
status: a string, one of "citizen", "legal_resident", "illegal_resident"
Raises a ValueError if status is not one of those 3 strings
"""
Person.__init__(self, name)
if status == "citizen" or status == "legal_resident" or status == "illegal_resident":
self.status = status
else:
raise ValueError
def getStatus(self):
"""
Returns the status
"""
return self.status
a = USResident('Tim Beaver', 'citizen')
print(a.getStatus())
b = USResident('Tim Horton', 'non-resident')
|
|
7aab4b3e2761209aae312512ec6582a83865c912
|
tests/test_logger.py
|
tests/test_logger.py
|
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return {'agility': 8, 'endurance': 8, 'name': 'Sir. Stephen Mathis of Nova Scotia', 'armor': 0, 'attack': 4}
@pytest.fixture
def dragon_instance(knight):
return dragon.Dragon('NMR', knight)
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
log_instance.comparison(knight, dragon_instance, stats_map)
|
Implement unit tests for logger class
|
Implement unit tests for logger class
|
Python
|
mit
|
reinikai/mugloar
|
Implement unit tests for logger class
|
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return {'agility': 8, 'endurance': 8, 'name': 'Sir. Stephen Mathis of Nova Scotia', 'armor': 0, 'attack': 4}
@pytest.fixture
def dragon_instance(knight):
return dragon.Dragon('NMR', knight)
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
log_instance.comparison(knight, dragon_instance, stats_map)
|
<commit_before><commit_msg>Implement unit tests for logger class<commit_after>
|
import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return {'agility': 8, 'endurance': 8, 'name': 'Sir. Stephen Mathis of Nova Scotia', 'armor': 0, 'attack': 4}
@pytest.fixture
def dragon_instance(knight):
return dragon.Dragon('NMR', knight)
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
log_instance.comparison(knight, dragon_instance, stats_map)
|
Implement unit tests for logger classimport pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return {'agility': 8, 'endurance': 8, 'name': 'Sir. Stephen Mathis of Nova Scotia', 'armor': 0, 'attack': 4}
@pytest.fixture
def dragon_instance(knight):
return dragon.Dragon('NMR', knight)
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
log_instance.comparison(knight, dragon_instance, stats_map)
|
<commit_before><commit_msg>Implement unit tests for logger class<commit_after>import pytest
from mugloar import dragon, logger
@pytest.fixture
def log_instance():
"""Returns a Logger instance"""
return logger.Logger()
@pytest.fixture
def knight():
return {'agility': 8, 'endurance': 8, 'name': 'Sir. Stephen Mathis of Nova Scotia', 'armor': 0, 'attack': 4}
@pytest.fixture
def dragon_instance(knight):
return dragon.Dragon('NMR', knight)
@pytest.fixture
def stats_map():
return {'attack': 'scaleThickness',
'armor': 'clawSharpness',
'agility': 'wingStrength',
'endurance': 'fireBreath'}
def test_comparison(log_instance, knight, dragon_instance, stats_map):
log_instance.comparison(knight, dragon_instance, stats_map)
|
|
d3bcd60c92939211716e82718871559c38ddc19d
|
tests/test_random.py
|
tests/test_random.py
|
"""
Random and quasi-random generator tests.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy.testing as nt
from mwhutils.random import rstate
from mwhutils.random import uniform, latin, sobol
def test_rstate():
"""Test the rstate helper."""
rng = rstate()
rng = rstate(rng)
rng1 = rstate(1)
rng2 = rstate(1)
nt.assert_equal(rng1.randint(5), rng2.randint(5))
nt.assert_raises(ValueError, rstate, 'foo')
def check_random(method):
"""Check that the method implements the random-generator interface."""
bounds = [(0, 1), (3, 4)]
sample = method(bounds, 10)
assert sample.shape == (10, 2)
assert all(sample[:, 0] > 0) and all(sample[:, 0] < 1)
assert all(sample[:, 1] > 3) and all(sample[:, 1] < 4)
def test_random():
"""Test all the random generators."""
for method in [uniform, latin, sobol]:
yield check_random, method
|
Add test for random number generators.
|
Add test for random number generators.
|
Python
|
bsd-2-clause
|
mwhoffman/mwhutils
|
Add test for random number generators.
|
"""
Random and quasi-random generator tests.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy.testing as nt
from mwhutils.random import rstate
from mwhutils.random import uniform, latin, sobol
def test_rstate():
"""Test the rstate helper."""
rng = rstate()
rng = rstate(rng)
rng1 = rstate(1)
rng2 = rstate(1)
nt.assert_equal(rng1.randint(5), rng2.randint(5))
nt.assert_raises(ValueError, rstate, 'foo')
def check_random(method):
"""Check that the method implements the random-generator interface."""
bounds = [(0, 1), (3, 4)]
sample = method(bounds, 10)
assert sample.shape == (10, 2)
assert all(sample[:, 0] > 0) and all(sample[:, 0] < 1)
assert all(sample[:, 1] > 3) and all(sample[:, 1] < 4)
def test_random():
"""Test all the random generators."""
for method in [uniform, latin, sobol]:
yield check_random, method
|
<commit_before><commit_msg>Add test for random number generators.<commit_after>
|
"""
Random and quasi-random generator tests.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy.testing as nt
from mwhutils.random import rstate
from mwhutils.random import uniform, latin, sobol
def test_rstate():
"""Test the rstate helper."""
rng = rstate()
rng = rstate(rng)
rng1 = rstate(1)
rng2 = rstate(1)
nt.assert_equal(rng1.randint(5), rng2.randint(5))
nt.assert_raises(ValueError, rstate, 'foo')
def check_random(method):
"""Check that the method implements the random-generator interface."""
bounds = [(0, 1), (3, 4)]
sample = method(bounds, 10)
assert sample.shape == (10, 2)
assert all(sample[:, 0] > 0) and all(sample[:, 0] < 1)
assert all(sample[:, 1] > 3) and all(sample[:, 1] < 4)
def test_random():
"""Test all the random generators."""
for method in [uniform, latin, sobol]:
yield check_random, method
|
Add test for random number generators."""
Random and quasi-random generator tests.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy.testing as nt
from mwhutils.random import rstate
from mwhutils.random import uniform, latin, sobol
def test_rstate():
"""Test the rstate helper."""
rng = rstate()
rng = rstate(rng)
rng1 = rstate(1)
rng2 = rstate(1)
nt.assert_equal(rng1.randint(5), rng2.randint(5))
nt.assert_raises(ValueError, rstate, 'foo')
def check_random(method):
"""Check that the method implements the random-generator interface."""
bounds = [(0, 1), (3, 4)]
sample = method(bounds, 10)
assert sample.shape == (10, 2)
assert all(sample[:, 0] > 0) and all(sample[:, 0] < 1)
assert all(sample[:, 1] > 3) and all(sample[:, 1] < 4)
def test_random():
"""Test all the random generators."""
for method in [uniform, latin, sobol]:
yield check_random, method
|
<commit_before><commit_msg>Add test for random number generators.<commit_after>"""
Random and quasi-random generator tests.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy.testing as nt
from mwhutils.random import rstate
from mwhutils.random import uniform, latin, sobol
def test_rstate():
"""Test the rstate helper."""
rng = rstate()
rng = rstate(rng)
rng1 = rstate(1)
rng2 = rstate(1)
nt.assert_equal(rng1.randint(5), rng2.randint(5))
nt.assert_raises(ValueError, rstate, 'foo')
def check_random(method):
"""Check that the method implements the random-generator interface."""
bounds = [(0, 1), (3, 4)]
sample = method(bounds, 10)
assert sample.shape == (10, 2)
assert all(sample[:, 0] > 0) and all(sample[:, 0] < 1)
assert all(sample[:, 1] > 3) and all(sample[:, 1] < 4)
def test_random():
"""Test all the random generators."""
for method in [uniform, latin, sobol]:
yield check_random, method
|
|
f6f13b4ff52fbe9346e9f60ba054b305da122d36
|
tests/test_server.py
|
tests/test_server.py
|
import os
import time
import pytest
import requests
import json
import yaml
HERE = os.path.dirname(os.path.abspath(__file__))
@pytest.fixture()
def sess():
sess = requests.Session()
sess.headers = {
'Content-Type': 'application/json'
}
return sess
class TestStartProc(object):
def test_start_proc(self, sess):
doc = {
'cmd': 'python -m SimpleHTTPServer 7000'.split()
}
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(doc))
result = resp.json()
assert os.path.exists('/proc/%s' % result['pid'])
assert os.kill(result['pid'], 0) is None
class TestStartHelloProc(object):
url = 'http://localhost:5000/files/hello_server.py'
example_server = os.path.join(HERE, '..', 'example', 'hello_server.py')
example_spec = os.path.join(HERE, '..', 'example', 'hello_spec.yaml')
def put_file(self, sess):
resp = sess.put(self.url,
headers={'Content-Type': 'application/octet-stream'},
data=open(self.example_server))
assert resp.ok
def upload_spec(self, sess):
spec = yaml.safe_load(open(self.example_spec))
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(spec))
assert resp.ok
def test_put_file(self, sess):
self.put_file(sess)
resp = sess.get(self.url)
assert resp.ok
        assert resp.content == open(self.example_server).read()
def test_run_hello_server(self, sess):
self.put_file(sess)
self.upload_spec(sess)
time.sleep(10)
resp = sess.get('http://localhost:6010/')
assert resp.ok
assert resp.content == 'Hello World!'
|
Add some initial tests. These require the server to be running.
|
Add some initial tests. These require the server to be running.
|
Python
|
bsd-3-clause
|
ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd
|
Add some initial tests. These require the server to be running.
|
import os
import time
import pytest
import requests
import json
import yaml
HERE = os.path.dirname(os.path.abspath(__file__))
@pytest.fixture()
def sess():
sess = requests.Session()
sess.headers = {
'Content-Type': 'application/json'
}
return sess
class TestStartProc(object):
def test_start_proc(self, sess):
doc = {
'cmd': 'python -m SimpleHTTPServer 7000'.split()
}
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(doc))
result = resp.json()
assert os.path.exists('/proc/%s' % result['pid'])
assert os.kill(result['pid'], 0) is None
class TestStartHelloProc(object):
url = 'http://localhost:5000/files/hello_server.py'
example_server = os.path.join(HERE, '..', 'example', 'hello_server.py')
example_spec = os.path.join(HERE, '..', 'example', 'hello_spec.yaml')
def put_file(self, sess):
resp = sess.put(self.url,
headers={'Content-Type': 'application/octet-stream'},
data=open(self.example_server))
assert resp.ok
def upload_spec(self, sess):
spec = yaml.safe_load(open(self.example_spec))
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(spec))
assert resp.ok
def test_put_file(self, sess):
self.put_file(sess)
resp = sess.get(self.url)
assert resp.ok
        assert resp.content == open(self.example_server).read()
def test_run_hello_server(self, sess):
self.put_file(sess)
self.upload_spec(sess)
time.sleep(10)
resp = sess.get('http://localhost:6010/')
assert resp.ok
assert resp.content == 'Hello World!'
|
<commit_before><commit_msg>Add some initial tests. These require the server to be running.<commit_after>
|
import os
import time
import pytest
import requests
import json
import yaml
HERE = os.path.dirname(os.path.abspath(__file__))
@pytest.fixture()
def sess():
sess = requests.Session()
sess.headers = {
'Content-Type': 'application/json'
}
return sess
class TestStartProc(object):
def test_start_proc(self, sess):
doc = {
'cmd': 'python -m SimpleHTTPServer 7000'.split()
}
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(doc))
result = resp.json()
assert os.path.exists('/proc/%s' % result['pid'])
assert os.kill(result['pid'], 0) is None
class TestStartHelloProc(object):
url = 'http://localhost:5000/files/hello_server.py'
example_server = os.path.join(HERE, '..', 'example', 'hello_server.py')
example_spec = os.path.join(HERE, '..', 'example', 'hello_spec.yaml')
def put_file(self, sess):
resp = sess.put(self.url,
headers={'Content-Type': 'application/octet-stream'},
data=open(self.example_server))
assert resp.ok
def upload_spec(self, sess):
spec = yaml.safe_load(open(self.example_spec))
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(spec))
assert resp.ok
def test_put_file(self, sess):
self.put_file(sess)
resp = sess.get(self.url)
assert resp.ok
        assert resp.content == open(self.example_server).read()
def test_run_hello_server(self, sess):
self.put_file(sess)
self.upload_spec(sess)
time.sleep(10)
resp = sess.get('http://localhost:6010/')
assert resp.ok
assert resp.content == 'Hello World!'
|
Add some initial tests. These require the server to be running.import os
import time
import pytest
import requests
import json
import yaml
HERE = os.path.dirname(os.path.abspath(__file__))
@pytest.fixture()
def sess():
sess = requests.Session()
sess.headers = {
'Content-Type': 'application/json'
}
return sess
class TestStartProc(object):
def test_start_proc(self, sess):
doc = {
'cmd': 'python -m SimpleHTTPServer 7000'.split()
}
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(doc))
result = resp.json()
assert os.path.exists('/proc/%s' % result['pid'])
assert os.kill(result['pid'], 0) is None
class TestStartHelloProc(object):
url = 'http://localhost:5000/files/hello_server.py'
example_server = os.path.join(HERE, '..', 'example', 'hello_server.py')
example_spec = os.path.join(HERE, '..', 'example', 'hello_spec.yaml')
def put_file(self, sess):
resp = sess.put(self.url,
headers={'Content-Type': 'application/octet-stream'},
data=open(self.example_server))
assert resp.ok
def upload_spec(self, sess):
spec = yaml.safe_load(open(self.example_spec))
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(spec))
assert resp.ok
def test_put_file(self, sess):
self.put_file(sess)
resp = sess.get(self.url)
assert resp.ok
        assert resp.content == open(self.example_server).read()
def test_run_hello_server(self, sess):
self.put_file(sess)
self.upload_spec(sess)
time.sleep(10)
resp = sess.get('http://localhost:6010/')
assert resp.ok
assert resp.content == 'Hello World!'
|
<commit_before><commit_msg>Add some initial tests. These require the server to be running.<commit_after>import os
import time
import pytest
import requests
import json
import yaml
HERE = os.path.dirname(os.path.abspath(__file__))
@pytest.fixture()
def sess():
sess = requests.Session()
sess.headers = {
'Content-Type': 'application/json'
}
return sess
class TestStartProc(object):
def test_start_proc(self, sess):
doc = {
'cmd': 'python -m SimpleHTTPServer 7000'.split()
}
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(doc))
result = resp.json()
assert os.path.exists('/proc/%s' % result['pid'])
assert os.kill(result['pid'], 0) is None
class TestStartHelloProc(object):
url = 'http://localhost:5000/files/hello_server.py'
example_server = os.path.join(HERE, '..', 'example', 'hello_server.py')
example_spec = os.path.join(HERE, '..', 'example', 'hello_spec.yaml')
def put_file(self, sess):
resp = sess.put(self.url,
headers={'Content-Type': 'application/octet-stream'},
data=open(self.example_server))
assert resp.ok
def upload_spec(self, sess):
spec = yaml.safe_load(open(self.example_spec))
resp = sess.post('http://localhost:5000/run/',
data=json.dumps(spec))
assert resp.ok
def test_put_file(self, sess):
self.put_file(sess)
resp = sess.get(self.url)
assert resp.ok
        assert resp.content == open(self.example_server).read()
def test_run_hello_server(self, sess):
self.put_file(sess)
self.upload_spec(sess)
time.sleep(10)
resp = sess.get('http://localhost:6010/')
assert resp.ok
assert resp.content == 'Hello World!'
|
|
76aae555aefa154ad30aec6d63669c6b0b520087
|
tests/test_init.py
|
tests/test_init.py
|
from flexmock import flexmock
import flask_uploads
from flask_uploads import Upload
from . import TestCase
class TestWithoutResizer(TestCase):
def test_upload_class_has_correct_attributes(self):
assert hasattr(Upload, 'id')
assert Upload.id == (
'column',
(('integer', [], {}),),
{'autoincrement': True, 'primary_key': True}
)
assert hasattr(Upload, 'name')
assert Upload.name == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
assert hasattr(Upload, 'url')
assert Upload.url == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is None
class TestWithResizer(TestWithoutResizer):
def setup_method(self, method):
resizer = flexmock(
sizes={
'nail': (8, 19),
'palm': (329, 192),
}
)
TestWithoutResizer.setup_method(self, method, resizer)
def test_upload_class_has_correct_attributes(self):
TestWithoutResizer.test_upload_class_has_correct_attributes(self)
assert hasattr(Upload, 'nail_name')
assert Upload.nail_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'nail_url')
assert Upload.nail_url == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_name')
assert Upload.palm_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_url')
assert Upload.palm_url == (
'column',
(('unicode', (255,), {}),),
{}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is self.resizer
|
Add tests for init function.
|
Add tests for init function.
|
Python
|
mit
|
FelixLoether/flask-uploads,FelixLoether/flask-image-upload-thing
|
Add tests for init function.
|
from flexmock import flexmock
import flask_uploads
from flask_uploads import Upload
from . import TestCase
class TestWithoutResizer(TestCase):
def test_upload_class_has_correct_attributes(self):
assert hasattr(Upload, 'id')
assert Upload.id == (
'column',
(('integer', [], {}),),
{'autoincrement': True, 'primary_key': True}
)
assert hasattr(Upload, 'name')
assert Upload.name == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
assert hasattr(Upload, 'url')
assert Upload.url == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is None
class TestWithResizer(TestWithoutResizer):
def setup_method(self, method):
resizer = flexmock(
sizes={
'nail': (8, 19),
'palm': (329, 192),
}
)
TestWithoutResizer.setup_method(self, method, resizer)
def test_upload_class_has_correct_attributes(self):
TestWithoutResizer.test_upload_class_has_correct_attributes(self)
assert hasattr(Upload, 'nail_name')
assert Upload.nail_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'nail_url')
assert Upload.nail_url == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_name')
assert Upload.palm_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_url')
assert Upload.palm_url == (
'column',
(('unicode', (255,), {}),),
{}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is self.resizer
|
<commit_before><commit_msg>Add tests for init function.<commit_after>
|
from flexmock import flexmock
import flask_uploads
from flask_uploads import Upload
from . import TestCase
class TestWithoutResizer(TestCase):
def test_upload_class_has_correct_attributes(self):
assert hasattr(Upload, 'id')
assert Upload.id == (
'column',
(('integer', [], {}),),
{'autoincrement': True, 'primary_key': True}
)
assert hasattr(Upload, 'name')
assert Upload.name == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
assert hasattr(Upload, 'url')
assert Upload.url == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is None
class TestWithResizer(TestWithoutResizer):
def setup_method(self, method):
resizer = flexmock(
sizes={
'nail': (8, 19),
'palm': (329, 192),
}
)
TestWithoutResizer.setup_method(self, method, resizer)
def test_upload_class_has_correct_attributes(self):
TestWithoutResizer.test_upload_class_has_correct_attributes(self)
assert hasattr(Upload, 'nail_name')
assert Upload.nail_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'nail_url')
assert Upload.nail_url == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_name')
assert Upload.palm_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_url')
assert Upload.palm_url == (
'column',
(('unicode', (255,), {}),),
{}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is self.resizer
|
Add tests for init function.from flexmock import flexmock
import flask_uploads
from flask_uploads import Upload
from . import TestCase
class TestWithoutResizer(TestCase):
def test_upload_class_has_correct_attributes(self):
assert hasattr(Upload, 'id')
assert Upload.id == (
'column',
(('integer', [], {}),),
{'autoincrement': True, 'primary_key': True}
)
assert hasattr(Upload, 'name')
assert Upload.name == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
assert hasattr(Upload, 'url')
assert Upload.url == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is None
class TestWithResizer(TestWithoutResizer):
def setup_method(self, method):
resizer = flexmock(
sizes={
'nail': (8, 19),
'palm': (329, 192),
}
)
TestWithoutResizer.setup_method(self, method, resizer)
def test_upload_class_has_correct_attributes(self):
TestWithoutResizer.test_upload_class_has_correct_attributes(self)
assert hasattr(Upload, 'nail_name')
assert Upload.nail_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'nail_url')
assert Upload.nail_url == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_name')
assert Upload.palm_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_url')
assert Upload.palm_url == (
'column',
(('unicode', (255,), {}),),
{}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is self.resizer
|
<commit_before><commit_msg>Add tests for init function.<commit_after>from flexmock import flexmock
import flask_uploads
from flask_uploads import Upload
from . import TestCase
class TestWithoutResizer(TestCase):
def test_upload_class_has_correct_attributes(self):
assert hasattr(Upload, 'id')
assert Upload.id == (
'column',
(('integer', [], {}),),
{'autoincrement': True, 'primary_key': True}
)
assert hasattr(Upload, 'name')
assert Upload.name == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
assert hasattr(Upload, 'url')
assert Upload.url == (
'column',
(('unicode', (255,), {}),),
{'nullable': False}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is None
class TestWithResizer(TestWithoutResizer):
def setup_method(self, method):
resizer = flexmock(
sizes={
'nail': (8, 19),
'palm': (329, 192),
}
)
TestWithoutResizer.setup_method(self, method, resizer)
def test_upload_class_has_correct_attributes(self):
TestWithoutResizer.test_upload_class_has_correct_attributes(self)
assert hasattr(Upload, 'nail_name')
assert Upload.nail_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'nail_url')
assert Upload.nail_url == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_name')
assert Upload.palm_name == (
'column',
(('unicode', (255,), {}),),
{}
)
assert hasattr(Upload, 'palm_url')
assert Upload.palm_url == (
'column',
(('unicode', (255,), {}),),
{}
)
def test_globals_are_set_correctly(self):
assert flask_uploads._db is self.db
assert flask_uploads._Storage is self.Storage
assert flask_uploads._resizer is self.resizer
|
|
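As a side note on the record above: the fake resizer built with flexmock is just an attribute bag. A minimal, self-contained sketch of that pattern (the sizes attribute name is taken from the record; the rest is illustrative):
from flexmock import flexmock
# Build a stand-in object whose attributes mirror the fake resizer above.
resizer = flexmock(sizes={'nail': (8, 19), 'palm': (329, 192)})
# The fake reads like a plain object, so test code can inspect it directly.
assert resizer.sizes['nail'] == (8, 19)
for name, (width, height) in sorted(resizer.sizes.items()):
    print(name, width, height)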
a0771954902d6c4af2709d871a115680f89a0497
|
installer/terraform/jazz-terraform-unix-noinstances/scripts/gitlab_privatetoken.py
|
installer/terraform/jazz-terraform-unix-noinstances/scripts/gitlab_privatetoken.py
|
import requests
from datetime import date
from urlparse import urljoin
from bs4 import BeautifulSoup
def find_csrf_token(text):
soup = BeautifulSoup(text, "lxml")
token = soup.find(attrs={"name": u"csrf-token"})
param = soup.find(attrs={"name": u"csrf-param"})
data = {param.get("content"): token.get(u"content")}
return data
def obtain_csrf_token(url):
r = requests.get(url)
token = find_csrf_token(r.text)
return token, r.cookies
def sign_in(sign_in_url, login, password, csrf, cookies):
data = {
u"user[login]": login,
u"user[password]": password,
u"user[remember_me]": 0,
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(sign_in_url, data=data, cookies=cookies)
token = find_csrf_token(r.text)
return token, r.history[0].cookies
def obtain_personal_access_token(pat_url, name, csrf, cookies):
today = date.today()
data = {
u"personal_access_token[expires_at]": today.replace(year=today.year + 1),
u"personal_access_token[name]": name,
u"personal_access_token[scopes][]": u"api",
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(pat_url, data=data, cookies=cookies)
soup = BeautifulSoup(r.text, u"lxml")
token = soup.find(
u'input', id=u'created-personal-access-token').get('value')
return token
def generate_personal_access_token(name, password, endpoint):
login = "root"
URL = 'http://{0}'.format(endpoint)
SIGN_IN_URL = urljoin(URL, "/users/sign_in")
PAT_URL = urljoin(URL, "/profile/personal_access_tokens")
csrf1, cookies1 = obtain_csrf_token(URL)
csrf2, cookies2 = sign_in(SIGN_IN_URL, login, password, csrf1, cookies1)
return obtain_personal_access_token(PAT_URL, name, csrf2, cookies2)
|
Add alt implementation to try and see if it works better
|
Add alt implementation to try and see if it works better
|
Python
|
apache-2.0
|
tmobile/jazz-installer,tmobile/jazz-installer,tmobile/jazz-installer,tmobile/jazz-installer
|
Add alt implementation to try and see if it works better
|
import requests
from datetime import date
from urlparse import urljoin
from bs4 import BeautifulSoup
def find_csrf_token(text):
soup = BeautifulSoup(text, "lxml")
token = soup.find(attrs={"name": u"csrf-token"})
param = soup.find(attrs={"name": u"csrf-param"})
data = {param.get("content"): token.get(u"content")}
return data
def obtain_csrf_token(url):
r = requests.get(url)
token = find_csrf_token(r.text)
return token, r.cookies
def sign_in(sign_in_url, login, password, csrf, cookies):
data = {
u"user[login]": login,
u"user[password]": password,
u"user[remember_me]": 0,
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(sign_in_url, data=data, cookies=cookies)
token = find_csrf_token(r.text)
return token, r.history[0].cookies
def obtain_personal_access_token(pat_url, name, csrf, cookies):
today = date.today()
data = {
u"personal_access_token[expires_at]": today.replace(year=today.year + 1),
u"personal_access_token[name]": name,
u"personal_access_token[scopes][]": u"api",
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(pat_url, data=data, cookies=cookies)
soup = BeautifulSoup(r.text, u"lxml")
token = soup.find(
u'input', id=u'created-personal-access-token').get('value')
return token
def generate_personal_access_token(name, password, endpoint):
login = "root"
URL = 'http://{0}'.format(endpoint)
SIGN_IN_URL = urljoin(URL, "/users/sign_in")
PAT_URL = urljoin(URL, "/profile/personal_access_tokens")
csrf1, cookies1 = obtain_csrf_token(URL)
csrf2, cookies2 = sign_in(SIGN_IN_URL, login, password, csrf1, cookies1)
return obtain_personal_access_token(PAT_URL, name, csrf2, cookies2)
|
<commit_before><commit_msg>Add alt implementation to try and see if it works better<commit_after>
|
import requests
from datetime import date
from urlparse import urljoin
from bs4 import BeautifulSoup
def find_csrf_token(text):
soup = BeautifulSoup(text, "lxml")
token = soup.find(attrs={"name": u"csrf-token"})
param = soup.find(attrs={"name": u"csrf-param"})
data = {param.get("content"): token.get(u"content")}
return data
def obtain_csrf_token(url):
r = requests.get(url)
token = find_csrf_token(r.text)
return token, r.cookies
def sign_in(sign_in_url, login, password, csrf, cookies):
data = {
u"user[login]": login,
u"user[password]": password,
u"user[remember_me]": 0,
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(sign_in_url, data=data, cookies=cookies)
token = find_csrf_token(r.text)
return token, r.history[0].cookies
def obtain_personal_access_token(pat_url, name, csrf, cookies):
today = date.today()
data = {
u"personal_access_token[expires_at]": today.replace(year=today.year + 1),
u"personal_access_token[name]": name,
u"personal_access_token[scopes][]": u"api",
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(pat_url, data=data, cookies=cookies)
soup = BeautifulSoup(r.text, u"lxml")
token = soup.find(
u'input', id=u'created-personal-access-token').get('value')
return token
def generate_personal_access_token(name, password, endpoint):
login = "root"
URL = 'http://{0}'.format(endpoint)
SIGN_IN_URL = urljoin(URL, "/users/sign_in")
PAT_URL = urljoin(URL, "/profile/personal_access_tokens")
csrf1, cookies1 = obtain_csrf_token(URL)
csrf2, cookies2 = sign_in(SIGN_IN_URL, login, password, csrf1, cookies1)
return obtain_personal_access_token(PAT_URL, name, csrf2, cookies2)
|
Add alt implementation to try and see if it works betterimport requests
from datetime import date
from urlparse import urljoin
from bs4 import BeautifulSoup
def find_csrf_token(text):
soup = BeautifulSoup(text, "lxml")
token = soup.find(attrs={"name": u"csrf-token"})
param = soup.find(attrs={"name": u"csrf-param"})
data = {param.get("content"): token.get(u"content")}
return data
def obtain_csrf_token(url):
r = requests.get(url)
token = find_csrf_token(r.text)
return token, r.cookies
def sign_in(sign_in_url, login, password, csrf, cookies):
data = {
u"user[login]": login,
u"user[password]": password,
u"user[remember_me]": 0,
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(sign_in_url, data=data, cookies=cookies)
token = find_csrf_token(r.text)
return token, r.history[0].cookies
def obtain_personal_access_token(pat_url, name, csrf, cookies):
today = date.today()
data = {
u"personal_access_token[expires_at]": today.replace(year=today.year + 1),
u"personal_access_token[name]": name,
u"personal_access_token[scopes][]": u"api",
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(pat_url, data=data, cookies=cookies)
soup = BeautifulSoup(r.text, u"lxml")
token = soup.find(
u'input', id=u'created-personal-access-token').get('value')
return token
def generate_personal_access_token(name, password, endpoint):
login = "root"
URL = 'http://{0}'.format(endpoint)
SIGN_IN_URL = urljoin(URL, "/users/sign_in")
PAT_URL = urljoin(URL, "/profile/personal_access_tokens")
csrf1, cookies1 = obtain_csrf_token(URL)
csrf2, cookies2 = sign_in(SIGN_IN_URL, login, password, csrf1, cookies1)
return obtain_personal_access_token(PAT_URL, name, csrf2, cookies2)
|
<commit_before><commit_msg>Add alt implementation to try and see if it works better<commit_after>import requests
from datetime import date
from urlparse import urljoin
from bs4 import BeautifulSoup
def find_csrf_token(text):
soup = BeautifulSoup(text, "lxml")
token = soup.find(attrs={"name": u"csrf-token"})
param = soup.find(attrs={"name": u"csrf-param"})
data = {param.get("content"): token.get(u"content")}
return data
def obtain_csrf_token(url):
r = requests.get(url)
token = find_csrf_token(r.text)
return token, r.cookies
def sign_in(sign_in_url, login, password, csrf, cookies):
data = {
u"user[login]": login,
u"user[password]": password,
u"user[remember_me]": 0,
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(sign_in_url, data=data, cookies=cookies)
token = find_csrf_token(r.text)
return token, r.history[0].cookies
def obtain_personal_access_token(pat_url, name, csrf, cookies):
today = date.today()
data = {
u"personal_access_token[expires_at]": today.replace(year=today.year + 1),
u"personal_access_token[name]": name,
u"personal_access_token[scopes][]": u"api",
u"utf8": u"✓"
}
data.update(csrf)
r = requests.post(pat_url, data=data, cookies=cookies)
soup = BeautifulSoup(r.text, u"lxml")
token = soup.find(
u'input', id=u'created-personal-access-token').get('value')
return token
def generate_personal_access_token(name, password, endpoint):
login = "root"
URL = 'http://{0}'.format(endpoint)
SIGN_IN_URL = urljoin(URL, "/users/sign_in")
PAT_URL = urljoin(URL, "/profile/personal_access_tokens")
csrf1, cookies1 = obtain_csrf_token(URL)
csrf2, cookies2 = sign_in(SIGN_IN_URL, login, password, csrf1, cookies1)
return obtain_personal_access_token(PAT_URL, name, csrf2, cookies2)
|
|
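A hedged usage sketch for the script in the record above; the token name, password, and endpoint are placeholders, and the call assumes a reachable GitLab instance whose root user has that password (the script itself is Python 2, given its urlparse/unicode usage):
# Hypothetical invocation; nothing below is from the original commit.
from gitlab_privatetoken import generate_personal_access_token
token = generate_personal_access_token(
    name='terraform-bootstrap',    # placeholder token name
    password='changeme',           # placeholder root password
    endpoint='gitlab.example.com'  # placeholder host; the scheme is added inside
)
print(token)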
c8af96f89ca218c0c49b90b39fe6ea4339e39321
|
tests/test_html_formatter.py
|
tests/test_html_formatter.py
|
# -*- coding: utf-8 -*-
"""
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: 2006 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import unittest
import StringIO
import random
from pygments import lexers, formatters
from pygments.token import _TokenType
class HtmlFormatterTest(unittest.TestCase):
def test_external_css(self):
# TODO: write this test.
pass
|
Add a reminder to write an HTML formatter test.
|
[svn] Add a reminder to write an HTML formatter test.
|
Python
|
bsd-2-clause
|
aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments
|
[svn] Add a reminder to write an HTML formatter test.
|
# -*- coding: utf-8 -*-
"""
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: 2006 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import unittest
import StringIO
import random
from pygments import lexers, formatters
from pygments.token import _TokenType
class HtmlFormatterTest(unittest.TestCase):
def test_external_css(self):
# TODO: write this test.
pass
|
<commit_before><commit_msg>[svn] Add a reminder to write an HTML formatter test.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: 2006 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import unittest
import StringIO
import random
from pygments import lexers, formatters
from pygments.token import _TokenType
class HtmlFormatterTest(unittest.TestCase):
def test_external_css(self):
# TODO: write this test.
pass
|
[svn] Add a reminder to write an HTML formatter test.# -*- coding: utf-8 -*-
"""
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: 2006 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import unittest
import StringIO
import random
from pygments import lexers, formatters
from pygments.token import _TokenType
class HtmlFormatterTest(unittest.TestCase):
def test_external_css(self):
# TODO: write this test.
pass
|
<commit_before><commit_msg>[svn] Add a reminder to write an HTML formatter test.<commit_after># -*- coding: utf-8 -*-
"""
Pygments HTML formatter tests
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: 2006 by Georg Brandl.
:license: BSD, see LICENSE for more details.
"""
import unittest
import StringIO
import random
from pygments import lexers, formatters
from pygments.token import _TokenType
class HtmlFormatterTest(unittest.TestCase):
def test_external_css(self):
# TODO: write this test.
pass
|
|
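The test body in the record above is deliberately left as a TODO. One possible shape for it, sketched under the assumption that the standard HtmlFormatter options (cssclass, get_style_defs) are what "external css" refers to:
# Illustrative sketch only; not part of the recorded commit.
from pygments import highlight
from pygments.lexers import PythonLexer
from pygments.formatters import HtmlFormatter
formatter = HtmlFormatter(cssclass='source')
html = highlight('print 1', PythonLexer(), formatter)
css = formatter.get_style_defs('.source')
assert '<div class="source">' in html  # markup carries only class names
assert '.source' in css                # styles live in the separate stylesheet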
5276206be1698e9adb231e61c2686f640825f6a7
|
dynamicforms/utils.py
|
dynamicforms/utils.py
|
def get_class(class_string, exception=Exception):
"""
Convert a string version of a function name to the callable object.
"""
if not hasattr(class_string, '__bases__'):
try:
class_string = class_string.encode('ascii')
mod_name, class_name = get_mod_func(class_string)
if class_name != '':
cls = getattr(__import__(mod_name, {}, {}, ['']), class_name)
except (ImportError, AttributeError):
pass
else:
return cls
raise exception('Failed to import %s' % class_string)
def get_mod_func(callback):
"""
Converts 'django.views.news.stories.story_detail' to
('django.views.news.stories', 'story_detail')
"""
try:
dot = callback.rindex('.')
except ValueError:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
Add string to callable util functions
|
Add string to callable util functions
|
Python
|
bsd-3-clause
|
SheepDogInc/django-dynamicforms,SheepDogInc/django-dynamicforms
|
Add string to callable util functions
|
def get_class(class_string, exception=Exception):
"""
Convert a string version of a function name to the callable object.
"""
if not hasattr(class_string, '__bases__'):
try:
class_string = class_string.encode('ascii')
mod_name, class_name = get_mod_func(class_string)
if class_name != '':
cls = getattr(__import__(mod_name, {}, {}, ['']), class_name)
except (ImportError, AttributeError):
pass
else:
return cls
raise exception('Failed to import %s' % class_string)
def get_mod_func(callback):
"""
Converts 'django.views.news.stories.story_detail' to
('django.views.news.stories', 'story_detail')
"""
try:
dot = callback.rindex('.')
except ValueError:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
<commit_before><commit_msg>Add string to callable util functions<commit_after>
|
def get_class(class_string, exception=Exception):
"""
Convert a string version of a function name to the callable object.
"""
if not hasattr(class_string, '__bases__'):
try:
class_string = class_string.encode('ascii')
mod_name, class_name = get_mod_func(class_string)
if class_name != '':
cls = getattr(__import__(mod_name, {}, {}, ['']), class_name)
except (ImportError, AttributeError):
pass
else:
return cls
raise exception('Failed to import %s' % class_string)
def get_mod_func(callback):
"""
Converts 'django.views.news.stories.story_detail' to
('django.views.news.stories', 'story_detail')
"""
try:
dot = callback.rindex('.')
except ValueError:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
Add string to callable util functionsdef get_class(class_string, exception=Exception):
"""
Convert a string version of a function name to the callable object.
"""
if not hasattr(class_string, '__bases__'):
try:
class_string = class_string.encode('ascii')
mod_name, class_name = get_mod_func(class_string)
if class_name != '':
cls = getattr(__import__(mod_name, {}, {}, ['']), class_name)
except (ImportError, AttributeError):
pass
else:
return cls
raise exception('Failed to import %s' % class_string)
def get_mod_func(callback):
"""
Converts 'django.views.news.stories.story_detail' to
('django.views.news.stories', 'story_detail')
"""
try:
dot = callback.rindex('.')
except ValueError:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
<commit_before><commit_msg>Add string to callable util functions<commit_after>def get_class(class_string, exception=Exception):
"""
Convert a string version of a function name to the callable object.
"""
if not hasattr(class_string, '__bases__'):
try:
class_string = class_string.encode('ascii')
mod_name, class_name = get_mod_func(class_string)
if class_name != '':
cls = getattr(__import__(mod_name, {}, {}, ['']), class_name)
except (ImportError, AttributeError):
pass
else:
return cls
raise exception('Failed to import %s' % class_string)
def get_mod_func(callback):
"""
Converts 'django.views.news.stories.story_detail' to
('django.views.news.stories', 'story_detail')
"""
try:
dot = callback.rindex('.')
except ValueError:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
|
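A quick check of the string helper from the record above; get_class as written targets Python 2 (it calls .encode('ascii') before splitting), so only the pure string function is exercised here, and the dotted path is just an example:
from dynamicforms.utils import get_mod_func
assert get_mod_func('django.views.news.stories.story_detail') == (
    'django.views.news.stories', 'story_detail')
# A path without dots falls back to an empty function part.
assert get_mod_func('nodots') == ('nodots', '')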
bad1a775e54ab2681adbeec483a7035b1d966ded
|
blvd_text2svm.py
|
blvd_text2svm.py
|
import os
import re
import unicodedata
from libshorttext.converter import *
def tokenizer(text):
def trim_links(input_text):
input_text = re.sub(r' (http|www|t\.co).*?(?: |$)', '', input_text)
return input_text
def trim_char(c):
if ord(c) > 127:
return ''
if c.isdigit() or c.isalpha() or c.isspace():
return c
if re.match(r'[#$/\-_]', c):
return c
else:
return ''
def trim_hashtags(input_text):
tag = re.search(r'#+([\w_]+[\w\'_\-]*[\w_]+)', input_text)
if tag:
hashtag = tag.group(0)
tag_text = tag.group(1)
splittable = re.match(r'.*([A-Z]|_|-|\d)', tag_text)
if splittable:
tokens = [token for token in re.split(r'([A-Z][a-z]*)|_|-|(\d*)', tag_text) if token]
new_text = ' '.join(tokens)
input_text = input_text.replace(hashtag, new_text)
return trim_hashtags(input_text)
else:
input_text = input_text.replace(hashtag, tag_text)
return trim_hashtags(input_text)
else:
return input_text
def trim_digits(input_text):
return re.sub(r'\d+', r'#', input_text)
def trim_spaces(input_text):
return re.sub(r'\s+', r' ', input_text)
text = unicodedata.normalize('NFD', unicode(text, 'utf-8'))
text = trim_links(text)
text = ''.join(map(trim_char, text))
text = trim_hashtags(text)
text = re.sub(r'([a-z])([0-9])', r'\1 \2', text)
text = re.sub(r'([0-9])([a-z])', r'\1 \2', text)
text = trim_digits(text)
text = trim_spaces(text)
return text.strip().lower().split()
text_converter = Text2svmConverter('-stopword 1 -stemming 1')
text_converter.text_prep.tokenizer = tokenizer
def process():
os.chdir(os.path.dirname(__file__))
convert_text('training_data/post_sale,event,food,info', text_converter, 'outputs/test1.svm')
text_converter.save('outputs/test1.text_converter')
process()
|
Add custom text2svm preprocessor with a blvd-minded tokenizer.
|
Add custom text2svm preprocessor with a blvd-minded tokenizer.
|
Python
|
bsd-3-clause
|
izimobile/libshorttext,izimobile/libshorttext,izimobile/libshorttext,izimobile/libshorttext,izimobile/libshorttext
|
Add custom text2svm preprocessor with a blvd-minded tokenizer.
|
import os
import re
import unicodedata
from libshorttext.converter import *
def tokenizer(text):
def trim_links(input_text):
input_text = re.sub(r' (http|www|t\.co).*?(?: |$)', '', input_text)
return input_text
def trim_char(c):
if ord(c) > 127:
return ''
if c.isdigit() or c.isalpha() or c.isspace():
return c
if re.match(r'[#$/\-_]', c):
return c
else:
return ''
def trim_hashtags(input_text):
tag = re.search(r'#+([\w_]+[\w\'_\-]*[\w_]+)', input_text)
if tag:
hashtag = tag.group(0)
tag_text = tag.group(1)
splittable = re.match(r'.*([A-Z]|_|-|\d)', tag_text)
if splittable:
tokens = [token for token in re.split(r'([A-Z][a-z]*)|_|-|(\d*)', tag_text) if token]
new_text = ' '.join(tokens)
input_text = input_text.replace(hashtag, new_text)
return trim_hashtags(input_text)
else:
input_text = input_text.replace(hashtag, tag_text)
return trim_hashtags(input_text)
else:
return input_text
def trim_digits(input_text):
return re.sub(r'\d+', r'#', input_text)
def trim_spaces(input_text):
return re.sub(r'\s+', r' ', input_text)
text = unicodedata.normalize('NFD', unicode(text, 'utf-8'))
text = trim_links(text)
text = ''.join(map(trim_char, text))
text = trim_hashtags(text)
text = re.sub(r'([a-z])([0-9])', r'\1 \2', text)
text = re.sub(r'([0-9])([a-z])', r'\1 \2', text)
text = trim_digits(text)
text = trim_spaces(text)
return text.strip().lower().split()
text_converter = Text2svmConverter('-stopword 1 -stemming 1')
text_converter.text_prep.tokenizer = tokenizer
def process():
os.chdir(os.path.dirname(__file__))
convert_text('training_data/post_sale,event,food,info', text_converter, 'outputs/test1.svm')
text_converter.save('outputs/test1.text_converter')
process()
|
<commit_before><commit_msg>Add custom text2svm preprocessor with a blvd-minded tokenizer.<commit_after>
|
import os
import re
import unicodedata
from libshorttext.converter import *
def tokenizer(text):
def trim_links(input_text):
input_text = re.sub(r' (http|www|t\.co).*?(?: |$)', '', input_text)
return input_text
def trim_char(c):
if ord(c) > 127:
return ''
if c.isdigit() or c.isalpha() or c.isspace():
return c
if re.match(r'[#$/\-_]', c):
return c
else:
return ''
def trim_hashtags(input_text):
tag = re.search(r'#+([\w_]+[\w\'_\-]*[\w_]+)', input_text)
if tag:
hashtag = tag.group(0)
tag_text = tag.group(1)
splittable = re.match(r'.*([A-Z]|_|-|\d)', tag_text)
if splittable:
tokens = [token for token in re.split(r'([A-Z][a-z]*)|_|-|(\d*)', tag_text) if token]
new_text = ' '.join(tokens)
input_text = input_text.replace(hashtag, new_text)
return trim_hashtags(input_text)
else:
input_text = input_text.replace(hashtag, tag_text)
return trim_hashtags(input_text)
else:
return input_text
def trim_digits(input_text):
return re.sub(r'\d+', r'#', input_text)
def trim_spaces(input_text):
return re.sub(r'\s+', r' ', input_text)
text = unicodedata.normalize('NFD', unicode(text, 'utf-8'))
text = trim_links(text)
text = ''.join(map(trim_char, text))
text = trim_hashtags(text)
text = re.sub(r'([a-z])([0-9])', r'\1 \2', text)
text = re.sub(r'([0-9])([a-z])', r'\1 \2', text)
text = trim_digits(text)
text = trim_spaces(text)
return text.strip().lower().split()
text_converter = Text2svmConverter('-stopword 1 -stemming 1')
text_converter.text_prep.tokenizer = tokenizer
def process():
os.chdir(os.path.dirname(__file__))
convert_text('training_data/post_sale,event,food,info', text_converter, 'outputs/test1.svm')
text_converter.save('outputs/test1.text_converter')
process()
|
Add custom text2svm preprocessor with a blvd-minded tokenizer.import os
import re
import unicodedata
from libshorttext.converter import *
def tokenizer(text):
def trim_links(input_text):
input_text = re.sub(r' (http|www|t\.co).*?(?: |$)', '', input_text)
return input_text
def trim_char(c):
if ord(c) > 127:
return ''
if c.isdigit() or c.isalpha() or c.isspace():
return c
if re.match(r'[#$/\-_]', c):
return c
else:
return ''
def trim_hashtags(input_text):
tag = re.search(r'#+([\w_]+[\w\'_\-]*[\w_]+)', input_text)
if tag:
hashtag = tag.group(0)
tag_text = tag.group(1)
splittable = re.match(r'.*([A-Z]|_|-|\d)', tag_text)
if splittable:
tokens = [token for token in re.split(r'([A-Z][a-z]*)|_|-|(\d*)', tag_text) if token]
new_text = ' '.join(tokens)
input_text = input_text.replace(hashtag, new_text)
return trim_hashtags(input_text)
else:
input_text = input_text.replace(hashtag, tag_text)
return trim_hashtags(input_text)
else:
return input_text
def trim_digits(input_text):
return re.sub(r'\d+', r'#', input_text)
def trim_spaces(input_text):
return re.sub(r'\s+', r' ', input_text)
text = unicodedata.normalize('NFD', unicode(text, 'utf-8'))
text = trim_links(text)
text = ''.join(map(trim_char, text))
text = trim_hashtags(text)
text = re.sub(r'([a-z])([0-9])', r'\1 \2', text)
text = re.sub(r'([0-9])([a-z])', r'\1 \2', text)
text = trim_digits(text)
text = trim_spaces(text)
return text.strip().lower().split()
text_converter = Text2svmConverter('-stopword 1 -stemming 1')
text_converter.text_prep.tokenizer = tokenizer
def process():
os.chdir(os.path.dirname(__file__))
convert_text('training_data/post_sale,event,food,info', text_converter, 'outputs/test1.svm')
text_converter.save('outputs/test1.text_converter')
process()
|
<commit_before><commit_msg>Add custom text2svm preprocessor with a blvd-minded tokenizer.<commit_after>import os
import re
import unicodedata
from libshorttext.converter import *
def tokenizer(text):
def trim_links(input_text):
input_text = re.sub(r' (http|www|t\.co).*?(?: |$)', '', input_text)
return input_text
def trim_char(c):
if ord(c) > 127:
return ''
if c.isdigit() or c.isalpha() or c.isspace():
return c
if re.match(r'[#$/\-_]', c):
return c
else:
return ''
def trim_hashtags(input_text):
tag = re.search(r'#+([\w_]+[\w\'_\-]*[\w_]+)', input_text)
if tag:
hashtag = tag.group(0)
tag_text = tag.group(1)
splittable = re.match(r'.*([A-Z]|_|-|\d)', tag_text)
if splittable:
tokens = [token for token in re.split(r'([A-Z][a-z]*)|_|-|(\d*)', tag_text) if token]
new_text = ' '.join(tokens)
input_text = input_text.replace(hashtag, new_text)
return trim_hashtags(input_text)
else:
input_text = input_text.replace(hashtag, tag_text)
return trim_hashtags(input_text)
else:
return input_text
def trim_digits(input_text):
return re.sub(r'\d+', r'#', input_text)
def trim_spaces(input_text):
return re.sub(r'\s+', r' ', input_text)
text = unicodedata.normalize('NFD', unicode(text, 'utf-8'))
text = trim_links(text)
text = ''.join(map(trim_char, text))
text = trim_hashtags(text)
text = re.sub(r'([a-z])([0-9])', r'\1 \2', text)
text = re.sub(r'([0-9])([a-z])', r'\1 \2', text)
text = trim_digits(text)
text = trim_spaces(text)
return text.strip().lower().split()
text_converter = Text2svmConverter('-stopword 1 -stemming 1')
text_converter.text_prep.tokenizer = tokenizer
def process():
os.chdir(os.path.dirname(__file__))
convert_text('training_data/post_sale,event,food,info', text_converter, 'outputs/test1.svm')
text_converter.save('outputs/test1.text_converter')
process()
|
|
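To make the preprocessing in the record above concrete, here is a hedged Python 2 sketch (the tokenizer calls unicode() on its input) of what the pipeline does to a sample post, with the tokenizer function assumed to be in scope (importing the module as-is would also trigger its module-level process() call); the token list in the comment is my reading of the regexes, not verified output:
# -*- coding: utf-8 -*-
# Python 2 sketch; the tokenizer expects a UTF-8 byte string.
sample = 'Big #SummerSale today! 50% off http://t.co/abc'
print(tokenizer(sample))
# Reading the rules: the link is dropped, the hashtag is split on its
# CamelCase boundary, punctuation is stripped, and digits collapse to '#',
# giving roughly: ['big', 'summer', 'sale', 'today', '#', 'off']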
96101d6158d7cf3e3e0efd287cefa27230d2f556
|
py/subtree-of-another-tree.py
|
py/subtree-of-another-tree.py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def sameTree(self, t1, t2):
if t1 is None and t2 is None:
return True
if t1 is None or t2 is None:
return False
return t1.val == t2.val and self.sameTree(t1.left, t2.left) and self.sameTree(t1.right, t2.right)
def dfs(self, cur):
if cur:
if self.sameTree(cur, self.t):
yield True
for x in self.dfs(cur.left):
yield x
for x in self.dfs(cur.right):
yield x
def isSubtree(self, s, t):
"""
:type s: TreeNode
:type t: TreeNode
:rtype: bool
"""
self.t = t
for _ in self.dfs(s):
return True
return False
|
Add py solution for 572. Subtree of Another Tree
|
Add py solution for 572. Subtree of Another Tree
572. Subtree of Another Tree: https://leetcode.com/problems/subtree-of-another-tree/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 572. Subtree of Another Tree
572. Subtree of Another Tree: https://leetcode.com/problems/subtree-of-another-tree/
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def sameTree(self, t1, t2):
if t1 is None and t2 is None:
return True
if t1 is None or t2 is None:
return False
return t1.val == t2.val and self.sameTree(t1.left, t2.left) and self.sameTree(t1.right, t2.right)
def dfs(self, cur):
if cur:
if self.sameTree(cur, self.t):
yield True
for x in self.dfs(cur.left):
yield x
for x in self.dfs(cur.right):
yield x
def isSubtree(self, s, t):
"""
:type s: TreeNode
:type t: TreeNode
:rtype: bool
"""
self.t = t
for _ in self.dfs(s):
return True
return False
|
<commit_before><commit_msg>Add py solution for 572. Subtree of Another Tree
572. Subtree of Another Tree: https://leetcode.com/problems/subtree-of-another-tree/<commit_after>
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def sameTree(self, t1, t2):
if t1 is None and t2 is None:
return True
if t1 is None or t2 is None:
return False
return t1.val == t2.val and self.sameTree(t1.left, t2.left) and self.sameTree(t1.right, t2.right)
def dfs(self, cur):
if cur:
if self.sameTree(cur, self.t):
yield True
for x in self.dfs(cur.left):
yield x
for x in self.dfs(cur.right):
yield x
def isSubtree(self, s, t):
"""
:type s: TreeNode
:type t: TreeNode
:rtype: bool
"""
self.t = t
for _ in self.dfs(s):
return True
return False
|
Add py solution for 572. Subtree of Another Tree
572. Subtree of Another Tree: https://leetcode.com/problems/subtree-of-another-tree/# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def sameTree(self, t1, t2):
if t1 is None and t2 is None:
return True
if t1 is None or t2 is None:
return False
return t1.val == t2.val and self.sameTree(t1.left, t2.left) and self.sameTree(t1.right, t2.right)
def dfs(self, cur):
if cur:
if self.sameTree(cur, self.t):
yield True
for x in self.dfs(cur.left):
yield x
for x in self.dfs(cur.right):
yield x
def isSubtree(self, s, t):
"""
:type s: TreeNode
:type t: TreeNode
:rtype: bool
"""
self.t = t
for _ in self.dfs(s):
return True
return False
|
<commit_before><commit_msg>Add py solution for 572. Subtree of Another Tree
572. Subtree of Another Tree: https://leetcode.com/problems/subtree-of-another-tree/<commit_after># Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def sameTree(self, t1, t2):
if t1 is None and t2 is None:
return True
if t1 is None or t2 is None:
return False
return t1.val == t2.val and self.sameTree(t1.left, t2.left) and self.sameTree(t1.right, t2.right)
def dfs(self, cur):
if cur:
if self.sameTree(cur, self.t):
yield True
for x in self.dfs(cur.left):
yield x
for x in self.dfs(cur.right):
yield x
def isSubtree(self, s, t):
"""
:type s: TreeNode
:type t: TreeNode
:rtype: bool
"""
self.t = t
for _ in self.dfs(s):
return True
return False
|
|
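A quick self-contained check of the solution in the record above, with the TreeNode class inlined since the record only shows it as a comment; assumes the Solution class from the record is in scope:
class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None
# s = 3(4(1, 2), 5) contains t = 4(1, 2) rooted at s.left.
s = TreeNode(3)
s.left, s.right = TreeNode(4), TreeNode(5)
s.left.left, s.left.right = TreeNode(1), TreeNode(2)
t = TreeNode(4)
t.left, t.right = TreeNode(1), TreeNode(2)
assert Solution().isSubtree(s, t) is True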
ed78dc7ce37ebaedba5a59d80e2d8821c711a44d
|
nx_cx_Freeze/__init__.py
|
nx_cx_Freeze/__init__.py
|
"""cx_Freeze extension
Extends the 'build' command with the 'exe-command' option to allow using a
different command from 'build_exe' to build executables from Python scripts.
"""
import sys
import distutils.command.build
from cx_Freeze.dist import build as cx_build
from cx_Freeze.dist import setup as cx_setup
from cx_Freeze.dist import _AddCommandClass
class build(cx_build):
cx_build.user_options.append(
('exe-command=', None, "Python script executables command"))
def initialize_options(self):
cx_build.initialize_options(self)
self.exe_command = 'build_exe'
def get_sub_commands(self):
subCommands = distutils.command.build.build.get_sub_commands(self)
if self.distribution.executables:
subCommands.append(self.exe_command)
return subCommands
# Override cx_Freeze setup to override build command.
def setup(**attrs):
commandClasses = attrs.setdefault("cmdclass", {})
_AddCommandClass(commandClasses, "build", build)
cx_setup(**attrs)
|
Extend cx_Freeze build class with the exe-command option to allow using a different command from build_exe to build executables from Python scripts
|
NXP-13818: Extend cx_Freeze build class with the exe-command option to allow using a different command from build_exe to build executables from Python scripts
|
Python
|
lgpl-2.1
|
IsaacYangSLA/nuxeo-drive,arameshkumar/base-nuxeo-drive,arameshkumar/base-nuxeo-drive,rsoumyassdi/nuxeo-drive,rsoumyassdi/nuxeo-drive,arameshkumar/nuxeo-drive,IsaacYangSLA/nuxeo-drive,loopingz/nuxeo-drive,loopingz/nuxeo-drive,ssdi-drive/nuxeo-drive,rsoumyassdi/nuxeo-drive,DirkHoffmann/nuxeo-drive,DirkHoffmann/nuxeo-drive,rsoumyassdi/nuxeo-drive,arameshkumar/base-nuxeo-drive,arameshkumar/base-nuxeo-drive,arameshkumar/nuxeo-drive,arameshkumar/nuxeo-drive,IsaacYangSLA/nuxeo-drive,IsaacYangSLA/nuxeo-drive,DirkHoffmann/nuxeo-drive,loopingz/nuxeo-drive,loopingz/nuxeo-drive,DirkHoffmann/nuxeo-drive,arameshkumar/nuxeo-drive,IsaacYangSLA/nuxeo-drive,ssdi-drive/nuxeo-drive,ssdi-drive/nuxeo-drive,loopingz/nuxeo-drive,DirkHoffmann/nuxeo-drive
|
NXP-13818: Extend cx_Freeze build class with the exe-command option to allow using a different command from build_exe to build executables from Python scripts
|
"""cx_Freeze extension
Extends the 'build' command with the 'exe-command' option to allow using a
different command from 'build_exe' to build executables from Python scripts.
"""
import sys
import distutils.command.build
from cx_Freeze.dist import build as cx_build
from cx_Freeze.dist import setup as cx_setup
from cx_Freeze.dist import _AddCommandClass
class build(cx_build):
cx_build.user_options.append(
('exe-command=', None, "Python script executables command"))
def initialize_options(self):
cx_build.initialize_options(self)
self.exe_command = 'build_exe'
def get_sub_commands(self):
subCommands = distutils.command.build.build.get_sub_commands(self)
if self.distribution.executables:
subCommands.append(self.exe_command)
return subCommands
# Override cx_Freeze setup to override build command.
def setup(**attrs):
commandClasses = attrs.setdefault("cmdclass", {})
_AddCommandClass(commandClasses, "build", build)
cx_setup(**attrs)
|
<commit_before><commit_msg>NXP-13818: Extend cx_Freeze build class with the exe-command option to allow using a different command from build_exe to build executables from Python scripts<commit_after>
|
"""cx_Freeze extension
Extends the 'build' command with the 'exe-command' option to allow using a
different command from 'build_exe' to build executables from Python scripts.
"""
import sys
import distutils.command.build
from cx_Freeze.dist import build as cx_build
from cx_Freeze.dist import setup as cx_setup
from cx_Freeze.dist import _AddCommandClass
class build(cx_build):
cx_build.user_options.append(
('exe-command=', None, "Python script executables command"))
def initialize_options(self):
cx_build.initialize_options(self)
self.exe_command = 'build_exe'
def get_sub_commands(self):
subCommands = distutils.command.build.build.get_sub_commands(self)
if self.distribution.executables:
subCommands.append(self.exe_command)
return subCommands
# Override cx_Freeze setup to override build command.
def setup(**attrs):
commandClasses = attrs.setdefault("cmdclass", {})
_AddCommandClass(commandClasses, "build", build)
cx_setup(**attrs)
|
NXP-13818: Extend cx_Freeze build class with the exe-command option to allow using a different command from build_exe to build executables from Python scripts"""cx_Freeze extension
Extends the 'build' command with the 'exe-command' option to allow using a
different command from 'build_exe' to build executables from Python scripts.
"""
import sys
import distutils.command.build
from cx_Freeze.dist import build as cx_build
from cx_Freeze.dist import setup as cx_setup
from cx_Freeze.dist import _AddCommandClass
class build(cx_build):
cx_build.user_options.append(
('exe-command=', None, "Python script executables command"))
def initialize_options(self):
cx_build.initialize_options(self)
self.exe_command = 'build_exe'
def get_sub_commands(self):
subCommands = distutils.command.build.build.get_sub_commands(self)
if self.distribution.executables:
subCommands.append(self.exe_command)
return subCommands
# Override cx_Freeze setup to override build command.
def setup(**attrs):
commandClasses = attrs.setdefault("cmdclass", {})
_AddCommandClass(commandClasses, "build", build)
cx_setup(**attrs)
|
<commit_before><commit_msg>NXP-13818: Extend cx_Freeze build class with the exe-command option to allow using a different command from build_exe to build executables from Python scripts<commit_after>"""cx_Freeze extension
Extends the 'build' command with the 'exe-command' option to allow using a
different command from 'build_exe' to build executables from Python scripts.
"""
import sys
import distutils.command.build
from cx_Freeze.dist import build as cx_build
from cx_Freeze.dist import setup as cx_setup
from cx_Freeze.dist import _AddCommandClass
class build(cx_build):
cx_build.user_options.append(
('exe-command=', None, "Python script executables command"))
def initialize_options(self):
cx_build.initialize_options(self)
self.exe_command = 'build_exe'
def get_sub_commands(self):
subCommands = distutils.command.build.build.get_sub_commands(self)
if self.distribution.executables:
subCommands.append(self.exe_command)
return subCommands
# Override cx_Freeze setup to override build command.
def setup(**attrs):
commandClasses = attrs.setdefault("cmdclass", {})
_AddCommandClass(commandClasses, "build", build)
cx_setup(**attrs)
|
|
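A hedged sketch of how the extended build command above might be driven from a setup script; the project metadata, entry script name, and the alternative command name build_exe_custom are all placeholders:
# Illustrative setup script; assumes cx_Freeze provides Executable as usual.
from cx_Freeze import Executable
from nx_cx_Freeze import setup
setup(
    name='example-app',                   # placeholder metadata
    version='0.1',
    executables=[Executable('main.py')],  # placeholder entry script
)
# On the command line a different exe command can then be selected:
#   python setup.py build --exe-command=build_exe_custom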
103090f79c30ef018be383df745998601626f918
|
tests/test_apiv2_controller.py
|
tests/test_apiv2_controller.py
|
import unittest2
import webtest
import json
import webapp2
from google.appengine.ext import testbed
from controllers.api.api_event_controller import ApiEventController
class TestApiController(unittest2.TestCase):
def setUp(self):
# Use ApiEventController as a random API controller to test on
app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventController, methods=['GET'])], debug=True)
self.testapp = webtest.TestApp(app)
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.testbed.init_taskqueue_stub()
def tearDown(self):
self.testbed.deactivate()
def test_validate_tba_app_id(self):
# Fail
response = self.testapp.get('/2010sc', expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': ''}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': '::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Pass, event not in database
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:a'}, expect_errors=True)
self.assertEqual(response.status_code, 404)
self.assertTrue('404' in response.json)
|
Add test cases for X-TBA-App-Id
|
api: Add test cases for X-TBA-App-Id
|
Python
|
mit
|
jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,bvisness/the-blue-alliance,nwalters512/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,bdaroz/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,1fish2/the-blue-alliance
|
api: Add test cases for X-TBA-App-Id
|
import unittest2
import webtest
import json
import webapp2
from google.appengine.ext import testbed
from controllers.api.api_event_controller import ApiEventController
class TestApiController(unittest2.TestCase):
def setUp(self):
# Use ApiEventController as a random API controller to test on
app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventController, methods=['GET'])], debug=True)
self.testapp = webtest.TestApp(app)
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.testbed.init_taskqueue_stub()
def tearDown(self):
self.testbed.deactivate()
def test_validate_tba_app_id(self):
# Fail
response = self.testapp.get('/2010sc', expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': ''}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': '::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Pass, event not in database
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:a'}, expect_errors=True)
self.assertEqual(response.status_code, 404)
self.assertTrue('404' in response.json)
|
<commit_before><commit_msg>api: Add test cases for X-TBA-App-Id<commit_after>
|
import unittest2
import webtest
import json
import webapp2
from google.appengine.ext import testbed
from controllers.api.api_event_controller import ApiEventController
class TestApiController(unittest2.TestCase):
def setUp(self):
# Use ApiEventController as a random API controller to test on
app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventController, methods=['GET'])], debug=True)
self.testapp = webtest.TestApp(app)
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.testbed.init_taskqueue_stub()
def tearDown(self):
self.testbed.deactivate()
def test_validate_tba_app_id(self):
# Fail
response = self.testapp.get('/2010sc', expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': ''}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': '::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Pass, event not in database
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:a'}, expect_errors=True)
self.assertEqual(response.status_code, 404)
self.assertTrue('404' in response.json)
|
api: Add test cases for X-TBA-App-Idimport unittest2
import webtest
import json
import webapp2
from google.appengine.ext import testbed
from controllers.api.api_event_controller import ApiEventController
class TestApiController(unittest2.TestCase):
def setUp(self):
# Use ApiEventController as a random API controller to test on
app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventController, methods=['GET'])], debug=True)
self.testapp = webtest.TestApp(app)
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.testbed.init_taskqueue_stub()
def tearDown(self):
self.testbed.deactivate()
def test_validate_tba_app_id(self):
# Fail
response = self.testapp.get('/2010sc', expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': ''}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': '::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Pass, event not in database
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:a'}, expect_errors=True)
self.assertEqual(response.status_code, 404)
self.assertTrue('404' in response.json)
|
<commit_before><commit_msg>api: Add test cases for X-TBA-App-Id<commit_after>import unittest2
import webtest
import json
import webapp2
from google.appengine.ext import testbed
from controllers.api.api_event_controller import ApiEventController
class TestApiController(unittest2.TestCase):
def setUp(self):
# Use ApiEventController as a random API controller to test on
app = webapp2.WSGIApplication([webapp2.Route(r'/<event_key:>', ApiEventController, methods=['GET'])], debug=True)
self.testapp = webtest.TestApp(app)
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.testbed.init_taskqueue_stub()
def tearDown(self):
self.testbed.deactivate()
def test_validate_tba_app_id(self):
# Fail
response = self.testapp.get('/2010sc', expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': ''}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': '::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a::'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Fail
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:'}, expect_errors=True)
self.assertEqual(response.status_code, 400)
self.assertTrue('Error' in response.json)
# Pass, event not in database
response = self.testapp.get('/2010sc', headers={'X-TBA-App-Id': 'a:a:a'}, expect_errors=True)
self.assertEqual(response.status_code, 404)
self.assertTrue('404' in response.json)
|
|
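For reference, the header format the tests above exercise is three colon-separated, non-empty fields. A minimal validator with the same accept/reject behaviour as those assertions (a mirror of the implied rule, not the actual controller code):
def valid_tba_app_id(value):
    """Accept 'a:b:c' style IDs: exactly three non-empty parts."""
    parts = (value or '').split(':')
    return len(parts) == 3 and all(parts)
assert not valid_tba_app_id('')
assert not valid_tba_app_id('::')
assert not valid_tba_app_id('a::')
assert not valid_tba_app_id('a:a:')
assert valid_tba_app_id('a:a:a')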
26b3423fb2ff9c05f00cba6e57b47174ec248804
|
test/skills/scheduled_skills.py
|
test/skills/scheduled_skills.py
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
Correct test criteria for time format for scheduled skill.
|
Correct test criteria for time format for scheduled skill.
Now matches current behaviour; previous behaviour is not a good idea since it depended on Locale.
|
Python
|
apache-2.0
|
linuxipho/mycroft-core,aatchison/mycroft-core,Dark5ide/mycroft-core,MycroftAI/mycroft-core,Dark5ide/mycroft-core,aatchison/mycroft-core,MycroftAI/mycroft-core,linuxipho/mycroft-core,forslund/mycroft-core,forslund/mycroft-core
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
Correct test criteria for time format for scheduled skill.
Now matches current behaviour; previous behaviour is not a good idea since it depended on Locale.
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
<commit_before>from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
<commit_msg>Correct test criteria for time format for scheduled skill.
Now matches current behaviour; previous behaviour is not a good idea since it depended on Locale.<commit_after>
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
Correct test criteria for time format for scheduled skill.
Now matches the current behaviour; the previous behaviour was not a good idea since it depended on the locale.from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
<commit_before>from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%A, %B %d, %Y at %H:%M"))
<commit_msg>Correct test criteria for time format for scheduled skill.
Now matches the current behaviour; the previous behaviour was not a good idea since it depended on the locale.<commit_after>from datetime import datetime, timedelta
import unittest
from mycroft.skills.scheduled_skills import ScheduledSkill
from mycroft.util.log import getLogger
__author__ = 'eward'
logger = getLogger(__name__)
class ScheduledSkillTest(unittest.TestCase):
skill = ScheduledSkill(name='ScheduledSkillTest')
def test_formatted_time_today_hours(self):
date = datetime.now() + timedelta(hours=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 hours and 59 minutes from now")
def test_formatted_time_today_min(self):
date = datetime.now() + timedelta(minutes=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
"1 minutes and 59 seconds from now")
def test_formatted_time_days(self):
date = datetime.now() + timedelta(days=2)
self.assertEquals(self.skill.
get_formatted_time(float(date.strftime('%s'))),
date.strftime("%d %B, %Y at %H:%M"))
|
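An aside on why the original assertion was fragile: strftime's %A (weekday name) and %B (month name) directives are resolved through the process locale, so the expected string varies from machine to machine. A minimal standalone sketch of the effect, assuming a German locale is installed on the system (this snippet is illustrative and not part of the repository):
import locale
from datetime import datetime
d = datetime(2016, 5, 1, 9, 30)
print(d.strftime("%A, %B %d, %Y at %H:%M"))      # "Sunday, May 01, 2016 at 09:30" under an English locale
locale.setlocale(locale.LC_TIME, "de_DE.UTF-8")  # assumption: this locale exists on the machine
print(d.strftime("%A, %B %d, %Y at %H:%M"))      # "Sonntag, Mai 01, 2016 at 09:30"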
4da778d9f0705505a6e38989ac55cf4cbc7db83a
|
capture_nids.py
|
capture_nids.py
|
import os
import json
import time, datetime
import parsers
import logging
import csv
import re
import requests
from sqlalchemy import types
from backend.app import app, db
from backend.models import *
from backend.search import Search
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from sqlalchemy import Date, cast
from dateutil import tz
logger = logging.getLogger(__name__)
def strip_rtf(rtf_str):
if rtf_str:
return unicode(rtf_str.replace('\\"', '').replace('\\r', '').replace('\\n', '').replace('\\t', ''))
else:
return None
def capture_meeting_report_nids():
i = 0
duplicate_count = 0
not_found_count = 0
db.session.commit()
logger.debug("reading report.json")
with open('data/report.json', 'r') as f:
for line in f.readlines():
i += 1
report = json.loads(line)
nid = int(report.get('nid'))
title = strip_rtf(report.get('title'))
date = None
try:
timestamp = int(report['meeting_date'].strip('"'))
date = datetime.datetime.fromtimestamp(timestamp, tz=tz.gettz('UTC'))
except (TypeError, AttributeError, KeyError) as e:
pass
if nid and title:
try:
tmp_query = CommitteeMeeting.query.filter_by(title=title)
if date:
tmp_query = tmp_query.filter_by(date=date)
committee_meeting = tmp_query.one()
committee_meeting.nid = nid
db.session.add(committee_meeting)
except NoResultFound as e:
not_found_count += 1
except MultipleResultsFound as e:
duplicate_count += 1
pass
if i % 100 == 0:
print "saving 100 committee meeting reports to the db (" + str(i) + " so far)"
db.session.commit()
if i % 1000 == 0:
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
db.session.commit()
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
return
if __name__ == '__main__':
Search.reindex_changes = False
capture_meeting_report_nids()
|
Add script for capturing meeting report nids from dump.
|
Add script for capturing meeting report nids from dump.
|
Python
|
apache-2.0
|
Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2
|
Add script for capturing meeting report nids from dump.
|
import os
import json
import time, datetime
import parsers
import logging
import csv
import re
import requests
from sqlalchemy import types
from backend.app import app, db
from backend.models import *
from backend.search import Search
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from sqlalchemy import Date, cast
from dateutil import tz
logger = logging.getLogger(__name__)
def strip_rtf(rtf_str):
if rtf_str:
return unicode(rtf_str.replace('\\"', '').replace('\\r', '').replace('\\n', '').replace('\\t', ''))
else:
return None
def capture_meeting_report_nids():
i = 0
duplicate_count = 0
not_found_count = 0
db.session.commit()
logger.debug("reading report.json")
with open('data/report.json', 'r') as f:
for line in f.readlines():
i += 1
report = json.loads(line)
nid = int(report.get('nid'))
title = strip_rtf(report.get('title'))
date = None
try:
timestamp = int(report['meeting_date'].strip('"'))
date = datetime.datetime.fromtimestamp(timestamp, tz=tz.gettz('UTC'))
except (TypeError, AttributeError, KeyError) as e:
pass
if nid and title:
try:
tmp_query = CommitteeMeeting.query.filter_by(title=title)
if date:
tmp_query = tmp_query.filter_by(date=date)
committee_meeting = tmp_query.one()
committee_meeting.nid = nid
db.session.add(committee_meeting)
except NoResultFound as e:
not_found_count += 1
except MultipleResultsFound as e:
duplicate_count += 1
pass
if i % 100 == 0:
print "saving 100 committee meeting reports to the db (" + str(i) + " so far)"
db.session.commit()
if i % 1000 == 0:
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
db.session.commit()
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
return
if __name__ == '__main__':
Search.reindex_changes = False
capture_meeting_report_nids()
|
<commit_before><commit_msg>Add script for capturing meeting report nids from dump.<commit_after>
|
import os
import json
import time, datetime
import parsers
import logging
import csv
import re
import requests
from sqlalchemy import types
from backend.app import app, db
from backend.models import *
from backend.search import Search
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from sqlalchemy import Date, cast
from dateutil import tz
logger = logging.getLogger(__name__)
def strip_rtf(rtf_str):
if rtf_str:
return unicode(rtf_str.replace('\\"', '').replace('\\r', '').replace('\\n', '').replace('\\t', ''))
else:
return None
def capture_meeting_report_nids():
i = 0
duplicate_count = 0
not_found_count = 0
db.session.commit()
logger.debug("reading report.json")
with open('data/report.json', 'r') as f:
for line in f.readlines():
i += 1
report = json.loads(line)
nid = int(report.get('nid'))
title = strip_rtf(report.get('title'))
date = None
try:
timestamp = int(report['meeting_date'].strip('"'))
date = datetime.datetime.fromtimestamp(timestamp, tz=tz.gettz('UTC'))
except (TypeError, AttributeError, KeyError) as e:
pass
if nid and title:
try:
tmp_query = CommitteeMeeting.query.filter_by(title=title)
if date:
tmp_query = tmp_query.filter_by(date=date)
committee_meeting = tmp_query.one()
committee_meeting.nid = nid
db.session.add(committee_meeting)
except NoResultFound as e:
not_found_count += 1
except MultipleResultsFound as e:
duplicate_count += 1
pass
if i % 100 == 0:
print "saving 100 committee meeting reports to the db (" + str(i) + " so far)"
db.session.commit()
if i % 1000 == 0:
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
db.session.commit()
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
return
if __name__ == '__main__':
Search.reindex_changes = False
capture_meeting_report_nids()
|
Add script for capturing meeting report nids from dump.import os
import json
import time, datetime
import parsers
import logging
import csv
import re
import requests
from sqlalchemy import types
from backend.app import app, db
from backend.models import *
from backend.search import Search
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from sqlalchemy import Date, cast
from dateutil import tz
logger = logging.getLogger(__name__)
def strip_rtf(rtf_str):
if rtf_str:
return unicode(rtf_str.replace('\\"', '').replace('\\r', '').replace('\\n', '').replace('\\t', ''))
else:
return None
def capture_meeting_report_nids():
i = 0
duplicate_count = 0
not_found_count = 0
db.session.commit()
logger.debug("reading report.json")
with open('data/report.json', 'r') as f:
for line in f.readlines():
i += 1
report = json.loads(line)
nid = int(report.get('nid'))
title = strip_rtf(report.get('title'))
date = None
try:
timestamp = int(report['meeting_date'].strip('"'))
date = datetime.datetime.fromtimestamp(timestamp, tz=tz.gettz('UTC'))
except (TypeError, AttributeError, KeyError) as e:
pass
if nid and title:
try:
tmp_query = CommitteeMeeting.query.filter_by(title=title)
if date:
tmp_query = tmp_query.filter_by(date=date)
committee_meeting = tmp_query.one()
committee_meeting.nid = nid
db.session.add(committee_meeting)
except NoResultFound as e:
not_found_count += 1
except MultipleResultsFound as e:
duplicate_count += 1
pass
if i % 100 == 0:
print "saving 100 committee meeting reports to the db (" + str(i) + " so far)"
db.session.commit()
if i % 1000 == 0:
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
db.session.commit()
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
return
if __name__ == '__main__':
Search.reindex_changes = False
capture_meeting_report_nids()
|
<commit_before><commit_msg>Add script for capturing meeting report nids from dump.<commit_after>import os
import json
import time, datetime
import parsers
import logging
import csv
import re
import requests
from sqlalchemy import types
from backend.app import app, db
from backend.models import *
from backend.search import Search
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
from sqlalchemy import Date, cast
from dateutil import tz
logger = logging.getLogger(__name__)
def strip_rtf(rtf_str):
if rtf_str:
return unicode(rtf_str.replace('\\"', '').replace('\\r', '').replace('\\n', '').replace('\\t', ''))
else:
return None
def capture_meeting_report_nids():
i = 0
duplicate_count = 0
not_found_count = 0
db.session.commit()
logger.debug("reading report.json")
with open('data/report.json', 'r') as f:
for line in f.readlines():
i += 1
report = json.loads(line)
nid = int(report.get('nid'))
title = strip_rtf(report.get('title'))
date = None
try:
timestamp = int(report['meeting_date'].strip('"'))
date = datetime.datetime.fromtimestamp(timestamp, tz=tz.gettz('UTC'))
except (TypeError, AttributeError, KeyError) as e:
pass
if nid and title:
try:
tmp_query = CommitteeMeeting.query.filter_by(title=title)
if date:
tmp_query = tmp_query.filter_by(date=date)
committee_meeting = tmp_query.one()
committee_meeting.nid = nid
db.session.add(committee_meeting)
except NoResultFound as e:
not_found_count += 1
except MultipleResultsFound as e:
duplicate_count += 1
pass
if i % 100 == 0:
print "saving 100 committee meeting reports to the db (" + str(i) + " so far)"
db.session.commit()
if i % 1000 == 0:
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
db.session.commit()
print duplicate_count, "duplicates could not be matched"
print not_found_count, "reports could not be found at all"
return
if __name__ == '__main__':
Search.reindex_changes = False
capture_meeting_report_nids()
|
|
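To make the parsing loop above concrete, here is a hypothetical single line of data/report.json (field names come from the script; the values are invented). Note that meeting_date arrives as a quoted epoch string, which is why the script calls .strip('"') before int():
import json
line = '{"nid": "12345", "title": "Budget Review", "meeting_date": "\\"1397124000\\""}'
report = json.loads(line)
nid = int(report.get('nid'))                        # 12345
timestamp = int(report['meeting_date'].strip('"'))  # 1397124000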
e614115b8b31220c5d577d42fdf9ac4202874c8e
|
events/migrations/0008_auto_20151127_1019.py
|
events/migrations/0008_auto_20151127_1019.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('events', '0007_organization_admin_users'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='admin_users',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
Add forgotten migration for Organization.admin_users field's blank=True.
|
Add forgotten migration for Organization.admin_users field's blank=True.
|
Python
|
mit
|
City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,tuomas777/linkedevents,tuomas777/linkedevents,aapris/linkedevents,kooditiimi/linkedevents,kooditiimi/linkedevents,kooditiimi/linkedevents,kooditiimi/linkedevents,tuomas777/linkedevents,City-of-Helsinki/linkedevents,aapris/linkedevents,aapris/linkedevents
|
Add forgotten migration for Organization.admin_users field's blank=True.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('events', '0007_organization_admin_users'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='admin_users',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
<commit_before><commit_msg>Add forgotten migration for Organization.admin_users field's blank=True.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('events', '0007_organization_admin_users'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='admin_users',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
Add forgotten migration for Organization.admin_users field's blank=True.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('events', '0007_organization_admin_users'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='admin_users',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
<commit_before><commit_msg>Add forgotten migration for Organization.admin_users field's blank=True.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
('events', '0007_organization_admin_users'),
]
operations = [
migrations.AlterField(
model_name='organization',
name='admin_users',
field=models.ManyToManyField(to=settings.AUTH_USER_MODEL, blank=True),
),
]
|
|
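The model-side declaration this migration mirrors would look roughly as follows (model and field names inferred from the migration itself, so treat this as a sketch). For a ManyToManyField, blank=True affects only form and admin validation, not the SQL schema, which is exactly why the accompanying migration is easy to forget:
from django.conf import settings
from django.db import models
class Organization(models.Model):
    # blank=True lets forms save an Organization with no admin users selected.
    admin_users = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True)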
f94f2ee4e61c683d2e05e1797ca3752d46b27a47
|
scripts/CalculateLocalityFDR.py
|
scripts/CalculateLocalityFDR.py
|
import camoco as co
import glob
import pandas as pd
import os
def read_FDR(glob_path):
dfs = []
for x in glob.glob(glob_path):
df = pd.read_table(x,sep=',')
net,gwas,win,flank,*junk = os.path.basename(x).replace('.','_').split('_')
if 'WindowSize' not in df.columns:
df.insert(0,'WindowSize',win)
if 'NumFlank' not in df.columns:
df.insert(0,'NumFlank',flank)
dfs.append(df)
df = pd.concat(dfs)
# I guess we forgot this before
df.insert(4,'TraitType','Element')
df.loc[[x.startswith('Log') for x in df.Term],'TraitType'] = 'Log'
df.loc[[x.startswith('PCA') for x in df.Term],'TraitType'] = 'PCA'
df.loc[[x.startswith('Trans') for x in df.Term],'TraitType'] = 'Trans'
return df
def groupedFDR(df,by='term'):
def grouped_agg(x):
return pd.DataFrame(
{
'Tot': sum(x.numReal),
'FDR10':sum(x[x.FDR<=0.1].numReal),
'FDR35':sum(x[x.FDR<=0.35].numReal),
'FDR50':sum(x[x.FDR<=.5].numReal)
},index=[None]
)
if by == 'term':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType','Term']
elif by == 'trait':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType']
return df.reset_index().groupby(groups).apply(grouped_agg)
|
Create scripts making downstream aggregation and stats easier.
|
Create scripts making downstream aggregation and stats easier.
|
Python
|
mit
|
schae234/Camoco,schae234/Camoco
|
Create scripts making downstream aggregation and stats easier.
|
import camoco as co
import glob
import pandas as pd
import os
def read_FDR(glob_path):
dfs = []
for x in glob.glob(glob_path):
df = pd.read_table(x,sep=',')
net,gwas,win,flank,*junk = os.path.basename(x).replace('.','_').split('_')
if 'WindowSize' not in df.columns:
df.insert(0,'WindowSize',win)
if 'NumFlank' not in df.columns:
df.insert(0,'NumFlank',flank)
dfs.append(df)
df = pd.concat(dfs)
# I guess we forgot this before
df.insert(4,'TraitType','Element')
df.loc[[x.startswith('Log') for x in df.Term],'TraitType'] = 'Log'
df.loc[[x.startswith('PCA') for x in df.Term],'TraitType'] = 'PCA'
df.loc[[x.startswith('Trans') for x in df.Term],'TraitType'] = 'Trans'
return df
def groupedFDR(df,by='term'):
def grouped_agg(x):
return pd.DataFrame(
{
'Tot': sum(x.numReal),
'FDR10':sum(x[x.FDR<=0.1].numReal),
'FDR35':sum(x[x.FDR<=0.35].numReal),
'FDR50':sum(x[x.FDR<=.5].numReal)
},index=[None]
)
if by == 'term':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType','Term']
elif by == 'trait':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType']
return df.reset_index().groupby(groups).apply(grouped_agg)
|
<commit_before><commit_msg>Create scripts making downstream aggregation and stats easier.<commit_after>
|
import camoco as co
import glob
import pandas as pd
import os
def read_FDR(glob_path):
dfs = []
for x in glob.glob(glob_path):
df = pd.read_table(x,sep=',')
net,gwas,win,flank,*junk = os.path.basename(x).replace('.','_').split('_')
if 'WindowSize' not in df.columns:
df.insert(0,'WindowSize',win)
if 'NumFlank' not in df.columns:
df.insert(0,'NumFlank',flank)
dfs.append(df)
df = pd.concat(dfs)
# I guess we forgot this before
df.insert(4,'TraitType','Element')
df.loc[[x.startswith('Log') for x in df.Term],'TraitType'] = 'Log'
df.loc[[x.startswith('PCA') for x in df.Term],'TraitType'] = 'PCA'
df.loc[[x.startswith('Trans') for x in df.Term],'TraitType'] = 'Trans'
return df
def groupedFDR(df,by='term'):
def grouped_agg(x):
return pd.DataFrame(
{
'Tot': sum(x.numReal),
'FDR10':sum(x[x.FDR<=0.1].numReal),
'FDR35':sum(x[x.FDR<=0.35].numReal),
'FDR50':sum(x[x.FDR<=.5].numReal)
},index=[None]
)
if by == 'term':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType','Term']
elif by == 'trait':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType']
return df.reset_index().groupby(groups).apply(grouped_agg)
|
Create scripts making downstream aggregation and stats easier.import camoco as co
import glob
import pandas as pd
import os
def read_FDR(glob_path):
dfs = []
for x in glob.glob(glob_path):
df = pd.read_table(x,sep=',')
net,gwas,win,flank,*junk = os.path.basename(x).replace('.','_').split('_')
if 'WindowSize' not in df.columns:
df.insert(0,'WindowSize',win)
if 'NumFlank' not in df.columns:
df.insert(0,'NumFlank',flank)
dfs.append(df)
df = pd.concat(dfs)
# I guess we forgot this before
df.insert(4,'TraitType','Element')
df.loc[[x.startswith('Log') for x in df.Term],'TraitType'] = 'Log'
df.loc[[x.startswith('PCA') for x in df.Term],'TraitType'] = 'PCA'
df.loc[[x.startswith('Trans') for x in df.Term],'TraitType'] = 'Trans'
return df
def groupedFDR(df,by='term'):
def grouped_agg(x):
return pd.DataFrame(
{
'Tot': sum(x.numReal),
'FDR10':sum(x[x.FDR<=0.1].numReal),
'FDR35':sum(x[x.FDR<=0.35].numReal),
'FDR50':sum(x[x.FDR<=.5].numReal)
},index=[None]
)
if by == 'term':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType','Term']
elif by == 'trait':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType']
return df.reset_index().groupby(groups).apply(grouped_agg)
|
<commit_before><commit_msg>Create scripts making downstream aggregation and stats easier.<commit_after>import camoco as co
import glob
import pandas as pd
import os
def read_FDR(glob_path):
dfs = []
for x in glob.glob(glob_path):
df = pd.read_table(x,sep=',')
net,gwas,win,flank,*junk = os.path.basename(x).replace('.','_').split('_')
if 'WindowSize' not in df.columns:
df.insert(0,'WindowSize',win)
if 'NumFlank' not in df.columns:
df.insert(0,'NumFlank',flank)
dfs.append(df)
df = pd.concat(dfs)
# I guess we forgot this before
df.insert(4,'TraitType','Element')
df.loc[[x.startswith('Log') for x in df.Term],'TraitType'] = 'Log'
df.loc[[x.startswith('PCA') for x in df.Term],'TraitType'] = 'PCA'
df.loc[[x.startswith('Trans') for x in df.Term],'TraitType'] = 'Trans'
return df
def groupedFDR(df,by='term'):
def grouped_agg(x):
return pd.DataFrame(
{
'Tot': sum(x.numReal),
'FDR10':sum(x[x.FDR<=0.1].numReal),
'FDR35':sum(x[x.FDR<=0.35].numReal),
'FDR50':sum(x[x.FDR<=.5].numReal)
},index=[None]
)
if by == 'term':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType','Term']
elif by == 'trait':
groups = ['Ontology','COB','WindowSize','NumFlank','TraitType']
return df.reset_index().groupby(groups).apply(grouped_agg)
|
|
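The tuple unpacking in read_FDR depends on a strict file-naming convention; a small illustration with an invented file name shows how the network, GWAS name, window size and flank count are recovered:
import os
path = '/results/ZmPAN_NAMpheno_50000_2_FDR.csv'  # hypothetical result file
net, gwas, win, flank, *junk = os.path.basename(path).replace('.', '_').split('_')
print(net, gwas, win, flank)                      # ZmPAN NAMpheno 50000 2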
45d1aa0c6aa7af3f097416f0215c5629706437ca
|
src/reindex_all_marks.py
|
src/reindex_all_marks.py
|
#!/usr/bin/env python
import convenience as c
import jobs
def main():
c.load_settings()
logger = c.logger("reindex_all_bookmarks")
db = c.db()
records = db.marks.find({":": {"$exists": False}})
number = records.count()
count = 1
for record in records:
logger.info("{count} of {number} {who}/{when}".format(
count=count,
number=number,
who=record["@"],
when=record["~"]))
logger.debug(type(record["~"]))
jobs.enqueue(jobs.IndexRecord(record))
count += 1
logger.info("Done")
if __name__ == "__main__":
main()
|
Add admin task to reindex all marks
|
Add admin task to reindex all marks
|
Python
|
agpl-3.0
|
calpaterson/recall,calpaterson/recall,calpaterson/recall
|
Add admin task to reindex all marks
|
#!/usr/bin/env python
import convenience as c
import jobs
def main():
c.load_settings()
logger = c.logger("reindex_all_bookmarks")
db = c.db()
records = db.marks.find({":": {"$exists": False}})
number = records.count()
count = 1
for record in records:
logger.info("{count} of {number} {who}/{when}".format(
count=count,
number=number,
who=record["@"],
when=record["~"]))
logger.debug(type(record["~"]))
jobs.enqueue(jobs.IndexRecord(record))
count += 1
logger.info("Done")
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add admin task to reindex all marks<commit_after>
|
#!/usr/bin/env python
import convenience as c
import jobs
def main():
c.load_settings()
logger = c.logger("reindex_all_bookmarks")
db = c.db()
records = db.marks.find({":": {"$exists": False}})
number = records.count()
count = 1
for record in records:
logger.info("{count} of {number} {who}/{when}".format(
count=count,
number=number,
who=record["@"],
when=record["~"]))
logger.debug(type(record["~"]))
jobs.enqueue(jobs.IndexRecord(record))
count += 1
logger.info("Done")
if __name__ == "__main__":
main()
|
Add admin task to reindex all marks#!/usr/bin/env python
import convenience as c
import jobs
def main():
c.load_settings()
logger = c.logger("reindex_all_bookmarks")
db = c.db()
records = db.marks.find({":": {"$exists": False}})
number = records.count()
count = 1
for record in records:
logger.info("{count} of {number} {who}/{when}".format(
count=count,
number=number,
who=record["@"],
when=record["~"]))
logger.debug(type(record["~"]))
jobs.enqueue(jobs.IndexRecord(record))
count += 1
logger.info("Done")
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add admin task to reindex all marks<commit_after>#!/usr/bin/env python
import convenience as c
import jobs
def main():
c.load_settings()
logger = c.logger("reindex_all_bookmarks")
db = c.db()
records = db.marks.find({":": {"$exists": False}})
number = records.count()
count = 1
for record in records:
logger.info("{count} of {number} {who}/{when}".format(
count=count,
number=number,
who=record["@"],
when=record["~"]))
logger.debug(type(record["~"]))
jobs.enqueue(jobs.IndexRecord(record))
count += 1
logger.info("Done")
if __name__ == "__main__":
main()
|
|
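The Mongo filter {":": {"$exists": False}} selects marks that lack a ":" key entirely, i.e. records that have never been indexed. A toy illustration of the same predicate in plain Python (the document shapes are invented to match the script's field conventions):
marks = [
    {"@": "cal", "~": 1325241600, ":": {"indexed": True}},  # has ":" -> already indexed, skipped
    {"@": "cal", "~": 1325328000},                          # no ":" -> would be re-enqueued
]
print(sum(1 for m in marks if ":" not in m))                # 1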
8e1ebcad18ddde87cdf6a46d673f90030cfe4e26
|
q3/FindAllAbbreviations.py
|
q3/FindAllAbbreviations.py
|
import sys
def prependAbbrev(front, abbr):
if type(front) is type(abbr[0]):
return [front + abbr[0]] + abbr[1:]
else:
return [front] + abbr
def prefixAll(p, lst):
return [prependAbbrev(p, l) for l in lst]
def findAllAbbrev(s):
if len(s) == 1:
return [[s], [1]]
else:
rest = findAllAbbrev(s[1:])
return prefixAll(s[0], rest) + prefixAll(1, rest)
for s in findAllAbbrev(sys.argv[1]):
print ''.join([str(i) for i in s])
|
Add python answer to q3
|
Add python answer to q3
|
Python
|
mit
|
UW-UPL/InterviewPrepJan2016,UW-UPL/InterviewPrepJan2016,UW-UPL/InterviewPrepJan2016,UW-UPL/InterviewPrepJan2016
|
Add python answer to q3
|
import sys
def prependAbbrev(front, abbr):
if type(front) is type(abbr[0]):
return [front + abbr[0]] + abbr[1:]
else:
return [front] + abbr
def prefixAll(p, lst):
return [prependAbbrev(p, l) for l in lst]
def findAllAbbrev(s):
if len(s) == 1:
return [[s], [1]]
else:
rest = findAllAbbrev(s[1:])
return prefixAll(s[0], rest) + prefixAll(1, rest)
for s in findAllAbbrev(sys.argv[1]):
print ''.join([str(i) for i in s])
|
<commit_before><commit_msg>Add python answer to q3<commit_after>
|
import sys
def prependAbbrev(front, abbr):
if type(front) is type(abbr[0]):
return [front + abbr[0]] + abbr[1:]
else:
return [front] + abbr
def prefixAll(p, lst):
return [prependAbbrev(p, l) for l in lst]
def findAllAbbrev(s):
if len(s) == 1:
return [[s], [1]]
else:
rest = findAllAbbrev(s[1:])
return prefixAll(s[0], rest) + prefixAll(1, rest)
for s in findAllAbbrev(sys.argv[1]):
print ''.join([str(i) for i in s])
|
Add python answer to q3import sys
def prependAbbrev(front, abbr):
if type(front) is type(abbr[0]):
return [front + abbr[0]] + abbr[1:]
else:
return [front] + abbr
def prefixAll(p, lst):
return [prependAbbrev(p, l) for l in lst]
def findAllAbbrev(s):
if len(s) == 1:
return [[s], [1]]
else:
rest = findAllAbbrev(s[1:])
return prefixAll(s[0], rest) + prefixAll(1, rest)
for s in findAllAbbrev(sys.argv[1]):
print ''.join([str(i) for i in s])
|
<commit_before><commit_msg>Add python answer to q3<commit_after>import sys
def prependAbbrev(front, abbr):
if type(front) is type(abbr[0]):
return [front + abbr[0]] + abbr[1:]
else:
return [front] + abbr
def prefixAll(p, lst):
return [prependAbbrev(p, l) for l in lst]
def findAllAbbrev(s):
if len(s) == 1:
return [[s], [1]]
else:
rest = findAllAbbrev(s[1:])
return prefixAll(s[0], rest) + prefixAll(1, rest)
for s in findAllAbbrev(sys.argv[1]):
print ''.join([str(i) for i in s])
|
|
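A worked example clarifies the recursion: prependAbbrev merges adjacent counts (1 + 1 becomes 2) while a leading letter concatenates onto a following string, so the script prints all 2**len(s) generalized abbreviations. Expected behaviour for the input abc:
python FindAllAbbreviations.py abc
abc
ab1
a1c
a2
1bc
1b1
2c
3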
3bb0563daed1660bbede02737b1d3a38e306e9bc
|
build/transform-sql.py
|
build/transform-sql.py
|
#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
Add a helper script which will transform a file full of SQL commands into a header file suitable for inclusion in a standard C file and use with the Subversion SQLite APIs. The goal here is that we can maintain our SQL schema directly as such, and let this script do the appropriate transformation as part of autogen.sh.
|
Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.
|
Python
|
apache-2.0
|
jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion
|
Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.
|
#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
<commit_before><commit_msg>Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.<commit_after>
|
#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
<commit_before><commit_msg>Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.<commit_after>#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
|
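To make the transformation concrete, suppose an invented input file wb-queries.sql with two statements separated by a blank line:
CREATE TABLE nodes (id INTEGER);

INSERT INTO nodes VALUES (1);
Running the script on it would produce wb-queries.sql.h roughly as follows (the variable name comes from replacing '.' and '-' in the file name with '_'; the blank line becomes APR_EOL_STR, and adjacent C string literals concatenate into a single array element after the leading NULL):
static const char * const wb_queries_sql[] = { NULL,
 "CREATE TABLE nodes (id INTEGER);"
 APR_EOL_STR
 "INSERT INTO nodes VALUES (1);"
 };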
36fd334a4b7a1adbe578a680d7d79e338452ea1a
|
Lib/importlib/test/benchmark.py
|
Lib/importlib/test/benchmark.py
|
from . import util
from .source import util as source_util
import gc
import decimal
import imp
import importlib
import sys
import timeit
def bench_cache(import_, repeat, number):
"""Measure the time it takes to pull from sys.modules."""
name = '<benchmark import>'
with util.uncache(name):
module = imp.new_module(name)
sys.modules[name] = module
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
import_(name)
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def bench_importing_source(import_, repeat, number, loc=100000):
"""Measure importing source from disk.
For worst-case scenario, the line endings are \\r\\n and thus require
universal newline translation.
"""
name = '__benchmark'
with source_util.create_modules(name) as mapping:
with open(mapping[name], 'w') as file:
for x in range(loc):
file.write("{0}\r\n".format(x))
with util.import_state(path=[mapping['.root']]):
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
try:
import_(name)
finally:
del sys.modules[name]
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def main(import_):
args = [('sys.modules', bench_cache, 5, 500000),
('source', bench_importing_source, 5, 10000)]
test_msg = "{test}, {number} times (best of {repeat}):"
result_msg = "{result:.2f} secs"
gc.disable()
try:
for name, meth, repeat, number in args:
result = meth(import_, repeat, number)
print(test_msg.format(test=name, repeat=repeat,
number=number).ljust(40),
result_msg.format(result=result).rjust(10))
finally:
gc.enable()
if __name__ == '__main__':
import optparse
parser = optparse.OptionParser()
parser.add_option('-b', '--builtin', dest='builtin', action='store_true',
default=False, help="use the built-in __import__")
options, args = parser.parse_args()
if args:
raise RuntimeError("unrecognized args: {0}".format(args))
import_ = __import__
if not options.builtin:
import_ = importlib.__import__
main(import_)
|
Add simple tests for __import__ for future optimizations to importlib.
|
Add simple tests for __import__ for future optimizations to importlib.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Add simple tests for __import__ for future optimizations to importlib.
|
from . import util
from .source import util as source_util
import gc
import decimal
import imp
import importlib
import sys
import timeit
def bench_cache(import_, repeat, number):
"""Measure the time it takes to pull from sys.modules."""
name = '<benchmark import>'
with util.uncache(name):
module = imp.new_module(name)
sys.modules[name] = module
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
import_(name)
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def bench_importing_source(import_, repeat, number, loc=100000):
"""Measure importing source from disk.
For worst-case scenario, the line endings are \\r\\n and thus require
universal newline translation.
"""
name = '__benchmark'
with source_util.create_modules(name) as mapping:
with open(mapping[name], 'w') as file:
for x in range(loc):
file.write("{0}\r\n".format(x))
with util.import_state(path=[mapping['.root']]):
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
try:
import_(name)
finally:
del sys.modules[name]
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def main(import_):
args = [('sys.modules', bench_cache, 5, 500000),
('source', bench_importing_source, 5, 10000)]
test_msg = "{test}, {number} times (best of {repeat}):"
result_msg = "{result:.2f} secs"
gc.disable()
try:
for name, meth, repeat, number in args:
result = meth(import_, repeat, number)
print(test_msg.format(test=name, repeat=repeat,
number=number).ljust(40),
result_msg.format(result=result).rjust(10))
finally:
gc.enable()
if __name__ == '__main__':
import optparse
parser = optparse.OptionParser()
parser.add_option('-b', '--builtin', dest='builtin', action='store_true',
default=False, help="use the built-in __import__")
options, args = parser.parse_args()
if args:
raise RuntimeError("unrecognized args: {0}".format(args))
import_ = __import__
if not options.builtin:
import_ = importlib.__import__
main(import_)
|
<commit_before><commit_msg>Add simple tests for __import__ for future optimizations to importlib.<commit_after>
|
from . import util
from .source import util as source_util
import gc
import decimal
import imp
import importlib
import sys
import timeit
def bench_cache(import_, repeat, number):
"""Measure the time it takes to pull from sys.modules."""
name = '<benchmark import>'
with util.uncache(name):
module = imp.new_module(name)
sys.modules[name] = module
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
import_(name)
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def bench_importing_source(import_, repeat, number, loc=100000):
"""Measure importing source from disk.
For worst-case scenario, the line endings are \\r\\n and thus require
universal newline translation.
"""
name = '__benchmark'
with source_util.create_modules(name) as mapping:
with open(mapping[name], 'w') as file:
for x in range(loc):
file.write("{0}\r\n".format(x))
with util.import_state(path=[mapping['.root']]):
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
try:
import_(name)
finally:
del sys.modules[name]
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def main(import_):
args = [('sys.modules', bench_cache, 5, 500000),
('source', bench_importing_source, 5, 10000)]
test_msg = "{test}, {number} times (best of {repeat}):"
result_msg = "{result:.2f} secs"
gc.disable()
try:
for name, meth, repeat, number in args:
result = meth(import_, repeat, number)
print(test_msg.format(test=name, repeat=repeat,
number=number).ljust(40),
result_msg.format(result=result).rjust(10))
finally:
gc.enable()
if __name__ == '__main__':
import optparse
parser = optparse.OptionParser()
parser.add_option('-b', '--builtin', dest='builtin', action='store_true',
default=False, help="use the built-in __import__")
options, args = parser.parse_args()
if args:
raise RuntimeError("unrecognized args: {0}".format(args))
import_ = __import__
if not options.builtin:
import_ = importlib.__import__
main(import_)
|
Add simple tests for __import__ for future optimizations to importlib.from . import util
from .source import util as source_util
import gc
import decimal
import imp
import importlib
import sys
import timeit
def bench_cache(import_, repeat, number):
"""Measure the time it takes to pull from sys.modules."""
name = '<benchmark import>'
with util.uncache(name):
module = imp.new_module(name)
sys.modules[name] = module
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
import_(name)
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def bench_importing_source(import_, repeat, number, loc=100000):
"""Measure importing source from disk.
For worst-case scenario, the line endings are \\r\\n and thus require
universal newline translation.
"""
name = '__benchmark'
with source_util.create_modules(name) as mapping:
with open(mapping[name], 'w') as file:
for x in range(loc):
file.write("{0}\r\n".format(x))
with util.import_state(path=[mapping['.root']]):
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
try:
import_(name)
finally:
del sys.modules[name]
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def main(import_):
args = [('sys.modules', bench_cache, 5, 500000),
('source', bench_importing_source, 5, 10000)]
test_msg = "{test}, {number} times (best of {repeat}):"
result_msg = "{result:.2f} secs"
gc.disable()
try:
for name, meth, repeat, number in args:
result = meth(import_, repeat, number)
print(test_msg.format(test=name, repeat=repeat,
number=number).ljust(40),
result_msg.format(result=result).rjust(10))
finally:
gc.enable()
if __name__ == '__main__':
import optparse
parser = optparse.OptionParser()
parser.add_option('-b', '--builtin', dest='builtin', action='store_true',
default=False, help="use the built-in __import__")
options, args = parser.parse_args()
if args:
raise RuntimeError("unrecognized args: {0}".format(args))
import_ = __import__
if not options.builtin:
import_ = importlib.__import__
main(import_)
|
<commit_before><commit_msg>Add simple tests for __import__ for future optimizations to importlib.<commit_after>from . import util
from .source import util as source_util
import gc
import decimal
import imp
import importlib
import sys
import timeit
def bench_cache(import_, repeat, number):
"""Measure the time it takes to pull from sys.modules."""
name = '<benchmark import>'
with util.uncache(name):
module = imp.new_module(name)
sys.modules[name] = module
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
import_(name)
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def bench_importing_source(import_, repeat, number, loc=100000):
"""Measure importing source from disk.
For worst-case scenario, the line endings are \\r\\n and thus require
universal newline translation.
"""
name = '__benchmark'
with source_util.create_modules(name) as mapping:
with open(mapping[name], 'w') as file:
for x in range(loc):
file.write("{0}\r\n".format(x))
with util.import_state(path=[mapping['.root']]):
runs = []
for x in range(repeat):
start_time = timeit.default_timer()
for y in range(number):
try:
import_(name)
finally:
del sys.modules[name]
end_time = timeit.default_timer()
runs.append(end_time - start_time)
return min(runs)
def main(import_):
args = [('sys.modules', bench_cache, 5, 500000),
('source', bench_importing_source, 5, 10000)]
test_msg = "{test}, {number} times (best of {repeat}):"
result_msg = "{result:.2f} secs"
gc.disable()
try:
for name, meth, repeat, number in args:
result = meth(import_, repeat, number)
print(test_msg.format(test=name, repeat=repeat,
number=number).ljust(40),
result_msg.format(result=result).rjust(10))
finally:
gc.enable()
if __name__ == '__main__':
import optparse
parser = optparse.OptionParser()
parser.add_option('-b', '--builtin', dest='builtin', action='store_true',
default=False, help="use the built-in __import__")
options, args = parser.parse_args()
if args:
raise RuntimeError("unrecognized args: {0}".format(args))
import_ = __import__
if not options.builtin:
import_ = importlib.__import__
main(import_)
|
|
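Typical invocations, assuming the benchmark is run from a checkout where the importlib test package is importable as a module (the -m form is needed because the file uses relative imports):
python -m importlib.test.benchmark       # time importlib.__import__ (the default)
python -m importlib.test.benchmark -b    # time the built-in __import__ for comparison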
4b45f50c3321dc6b79ad6fbc167a4f895aaac869
|
src/83_Remove_Duplicates_from_Sorted_List.py
|
src/83_Remove_Duplicates_from_Sorted_List.py
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def deleteDuplicates(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
temp = head
while temp is not None and temp.next is not None:
if temp.val == temp.next.val:
temp.next = temp.next.next
else:
temp = temp.next
return head
|
Remove Duplicates from Sorted List
|
Completed problem 83: Remove Duplicates from Sorted List
|
Python
|
mit
|
ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground,ChuanleiGuo/AlgorithmsPlayground
|
Completed problem 83: Remove Duplicates from Sorted List
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def deleteDuplicates(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
temp = head
while temp is not None and temp.next is not None:
if temp.val == temp.next.val:
temp.next = temp.next.next
else:
temp = temp.next
return head
|
<commit_before><commit_msg>Completed problem 83: Remove Duplicates from Sorted List<commit_after>
|
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def deleteDuplicates(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
temp = head
while temp is not None and temp.next is not None:
if temp.val == temp.next.val:
temp.next = temp.next.next
else:
temp = temp.next
return head
|
Completed problem 83: Remove Duplicates from Sorted List# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def deleteDuplicates(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
temp = head
while temp is not None and temp.next is not None:
if temp.val == temp.next.val:
temp.next = temp.next.next
else:
temp = temp.next
return head
|
<commit_before><commit_msg>Completed problem 83: Remove Duplicates from Sorted List<commit_after># Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
def deleteDuplicates(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
temp = head
while temp is not None and temp.next is not None:
if temp.val == temp.next.val:
temp.next = temp.next.next
else:
temp = temp.next
return head
|
|
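A short self-contained trace of the in-place walk, reusing the Solution class and the (commented-out) ListNode definition from the record above; the build helper is invented for the demo:
def build(vals):  # demo helper: list of values -> singly linked list
    head = cur = ListNode(vals[0])
    for v in vals[1:]:
        cur.next = ListNode(v)
        cur = cur.next
    return head
node = Solution().deleteDuplicates(build([1, 1, 2, 3, 3]))
result = []
while node is not None:
    result.append(node.val)
    node = node.next
print(result)  # [1, 2, 3]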
b03d28d21f73e8f8fabab5edcb9a0317e20c19c4
|
lexos/views/bct_view.py
|
lexos/views/bct_view.py
|
from flask import session, render_template, Blueprint
from lexos.helpers import constants
from lexos.views.base_view import detect_active_docs
from lexos.models.filemanager_model import FileManagerModel
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
dendrogram_blueprint = Blueprint('bct_analysis', __name__)
@dendrogram_blueprint.route("/bct_analysis", methods=['GET'])
def bct_analysis():
# Detect the number of active documents.
num_active_docs = detect_active_docs()
# Get labels with their ids.
id_label_map = \
FileManagerModel().load_file_manager().get_active_labels_with_id()
# Fill in default options.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
    if 'bctoption' not in session:
session['bctoption'] = constants.DEFAULT_BCT_OPTIONS
# Render the HTML template.
return render_template(
'bct_analysis.html',
itm="bct-analysis",
labels=id_label_map,
numActiveDocs=num_active_docs
)
|
Add view for bct analysis
|
Add view for bct analysis
|
Python
|
mit
|
WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos
|
Add view for bct analysis
|
from flask import session, render_template, Blueprint
from lexos.helpers import constants
from lexos.views.base_view import detect_active_docs
from lexos.models.filemanager_model import FileManagerModel
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
dendrogram_blueprint = Blueprint('bct_analysis', __name__)
@dendrogram_blueprint.route("/bct_analysis", methods=['GET'])
def bct_analysis():
# Detect the number of active documents.
num_active_docs = detect_active_docs()
# Get labels with their ids.
id_label_map = \
FileManagerModel().load_file_manager().get_active_labels_with_id()
# Fill in default options.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
    if 'bctoption' not in session:
session['bctoption'] = constants.DEFAULT_BCT_OPTIONS
# Render the HTML template.
return render_template(
'bct_analysis.html',
itm="bct-analysis",
labels=id_label_map,
numActiveDocs=num_active_docs
)
|
<commit_before><commit_msg>Add view for bct analysis<commit_after>
|
from flask import session, render_template, Blueprint
from lexos.helpers import constants
from lexos.views.base_view import detect_active_docs
from lexos.models.filemanager_model import FileManagerModel
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
dendrogram_blueprint = Blueprint('bct_analysis', __name__)
@dendrogram_blueprint.route("/bct_analysis", methods=['GET'])
def bct_analysis():
# Detect the number of active documents.
num_active_docs = detect_active_docs()
# Get labels with their ids.
id_label_map = \
FileManagerModel().load_file_manager().get_active_labels_with_id()
# Fill in default options.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
    if 'bctoption' not in session:
session['bctoption'] = constants.DEFAULT_BCT_OPTIONS
# Render the HTML template.
return render_template(
'bct_analysis.html',
itm="bct-analysis",
labels=id_label_map,
numActiveDocs=num_active_docs
)
|
Add view for bct analysisfrom flask import session, render_template, Blueprint
from lexos.helpers import constants
from lexos.views.base_view import detect_active_docs
from lexos.models.filemanager_model import FileManagerModel
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
dendrogram_blueprint = Blueprint('bct_analysis', __name__)
@dendrogram_blueprint.route("/bct_analysis", methods=['GET'])
def bct_analysis():
# Detect the number of active documents.
num_active_docs = detect_active_docs()
# Get labels with their ids.
id_label_map = \
FileManagerModel().load_file_manager().get_active_labels_with_id()
# Fill in default options.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
    if 'bctoption' not in session:
session['bctoption'] = constants.DEFAULT_BCT_OPTIONS
# Render the HTML template.
return render_template(
'bct_analysis.html',
itm="bct-analysis",
labels=id_label_map,
numActiveDocs=num_active_docs
)
|
<commit_before><commit_msg>Add view for bct analysis<commit_after>from flask import session, render_template, Blueprint
from lexos.helpers import constants
from lexos.views.base_view import detect_active_docs
from lexos.models.filemanager_model import FileManagerModel
# this is a flask blue print
# it helps us to manage groups of views
# see here for more detail:
# http://exploreflask.com/en/latest/blueprints.html
# http://flask.pocoo.org/docs/0.12/blueprints/
dendrogram_blueprint = Blueprint('bct_analysis', __name__)
@dendrogram_blueprint.route("/bct_analysis", methods=['GET'])
def bct_analysis():
# Detect the number of active documents.
num_active_docs = detect_active_docs()
# Get labels with their ids.
id_label_map = \
FileManagerModel().load_file_manager().get_active_labels_with_id()
# Fill in default options.
if 'analyoption' not in session:
session['analyoption'] = constants.DEFAULT_ANALYZE_OPTIONS
    if 'bctoption' not in session:
session['bctoption'] = constants.DEFAULT_BCT_OPTIONS
# Render the HTML template.
return render_template(
'bct_analysis.html',
itm="bct-analysis",
labels=id_label_map,
numActiveDocs=num_active_docs
)
|
|
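One wiring detail worth noting: the blueprint only becomes reachable once it is registered on the Flask application. A hedged sketch of that step (the actual Lexos app factory is not shown in the record, so everything here beyond the blueprint import path is an assumption):
from flask import Flask
from lexos.views.bct_view import dendrogram_blueprint
app = Flask(__name__)
app.register_blueprint(dendrogram_blueprint)  # /bct_analysis now resolves to bct_analysis()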
48ec3105c0dafaa3595b0ee565fef24d35ed6194
|
scripts/shutdown_button.py
|
scripts/shutdown_button.py
|
#!/usr/bin/env python
# Copyright (C) 2014 Gregory S. Meiste <http://gregmeiste.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import RPi.GPIO as GPIO
import os
GPIO_SHUTDOWN_BUTTON = 23
# Use the pin numbers from the ribbon cable board
GPIO.setmode(GPIO.BCM)
# Configure GPIO
GPIO.setup(GPIO_SHUTDOWN_BUTTON, GPIO.IN, pull_up_down=GPIO.PUD_UP)
print "Waiting for shutdown button press"
GPIO.wait_for_edge(GPIO_SHUTDOWN_BUTTON, GPIO.FALLING)
print "Shutting down due to button press!"
GPIO.cleanup()
os.system("shutdown -h now")
|
Add script to shutdown Raspberry Pi on button press
|
Add script to shutdown Raspberry Pi on button press
Signed-off-by: Greg Meiste <8a8f45e57c045ec63dc7e56e5eda862ea8c7cd4f@gmail.com>
|
Python
|
apache-2.0
|
meisteg/RaspberryPiTempAlarm,meisteg/RaspberryPiTempAlarm,meisteg/RaspberryPiTempAlarm
|
Add script to shutdown Raspberry Pi on button press
Signed-off-by: Greg Meiste <8a8f45e57c045ec63dc7e56e5eda862ea8c7cd4f@gmail.com>
|
#!/usr/bin/env python
# Copyright (C) 2014 Gregory S. Meiste <http://gregmeiste.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import RPi.GPIO as GPIO
import os
GPIO_SHUTDOWN_BUTTON = 23
# Use the pin numbers from the ribbon cable board
GPIO.setmode(GPIO.BCM)
# Configure GPIO
GPIO.setup(GPIO_SHUTDOWN_BUTTON, GPIO.IN, pull_up_down=GPIO.PUD_UP)
print "Waiting for shutdown button press"
GPIO.wait_for_edge(GPIO_SHUTDOWN_BUTTON, GPIO.FALLING)
print "Shutting down due to button press!"
GPIO.cleanup()
os.system("shutdown -h now")
|
<commit_before><commit_msg>Add script to shutdown Raspberry Pi on button press
Signed-off-by: Greg Meiste <8a8f45e57c045ec63dc7e56e5eda862ea8c7cd4f@gmail.com><commit_after>
|
#!/usr/bin/env python
# Copyright (C) 2014 Gregory S. Meiste <http://gregmeiste.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import RPi.GPIO as GPIO
import os
GPIO_SHUTDOWN_BUTTON = 23
# Use the pin numbers from the ribbon cable board
GPIO.setmode(GPIO.BCM)
# Configure GPIO
GPIO.setup(GPIO_SHUTDOWN_BUTTON, GPIO.IN, pull_up_down=GPIO.PUD_UP)
print "Waiting for shutdown button press"
GPIO.wait_for_edge(GPIO_SHUTDOWN_BUTTON, GPIO.FALLING)
print "Shutting down due to button press!"
GPIO.cleanup()
os.system("shutdown -h now")
|
Add script to shutdown Raspberry Pi on button press
Signed-off-by: Greg Meiste <8a8f45e57c045ec63dc7e56e5eda862ea8c7cd4f@gmail.com>#!/usr/bin/env python
# Copyright (C) 2014 Gregory S. Meiste <http://gregmeiste.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import RPi.GPIO as GPIO
import os
GPIO_SHUTDOWN_BUTTON = 23
# Use the pin numbers from the ribbon cable board
GPIO.setmode(GPIO.BCM)
# Configure GPIO
GPIO.setup(GPIO_SHUTDOWN_BUTTON, GPIO.IN, pull_up_down=GPIO.PUD_UP)
print "Waiting for shutdown button press"
GPIO.wait_for_edge(GPIO_SHUTDOWN_BUTTON, GPIO.FALLING)
print "Shutting down due to button press!"
GPIO.cleanup()
os.system("shutdown -h now")
|
<commit_before><commit_msg>Add script to shutdown Raspberry Pi on button press
Signed-off-by: Greg Meiste <8a8f45e57c045ec63dc7e56e5eda862ea8c7cd4f@gmail.com><commit_after>#!/usr/bin/env python
# Copyright (C) 2014 Gregory S. Meiste <http://gregmeiste.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import RPi.GPIO as GPIO
import os
GPIO_SHUTDOWN_BUTTON = 23
# Use the pin numbers from the ribbon cable board
GPIO.setmode(GPIO.BCM)
# Configure GPIO
GPIO.setup(GPIO_SHUTDOWN_BUTTON, GPIO.IN, pull_up_down=GPIO.PUD_UP)
print "Waiting for shutdown button press"
GPIO.wait_for_edge(GPIO_SHUTDOWN_BUTTON, GPIO.FALLING)
print "Shutting down due to button press!"
GPIO.cleanup()
os.system("shutdown -h now")
|
|
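The script above blocks on a single falling edge and then powers off. A hedged variant of the same idea, assuming the same wiring (momentary button on BCM pin 23 with the internal pull-up enabled) and a Raspberry Pi with RPi.GPIO installed, registers an event callback with a debounce window so contact bounce cannot fire the handler twice:

import signal
import subprocess
import RPi.GPIO as GPIO

GPIO_SHUTDOWN_BUTTON = 23

def on_press(channel):
    print("Shutting down due to button press!")
    GPIO.cleanup()
    subprocess.call(["shutdown", "-h", "now"])

GPIO.setmode(GPIO.BCM)
GPIO.setup(GPIO_SHUTDOWN_BUTTON, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# bouncetime is in milliseconds; 300 ms is a common starting point for tact switches.
GPIO.add_event_detect(GPIO_SHUTDOWN_BUTTON, GPIO.FALLING,
                      callback=on_press, bouncetime=300)
print("Waiting for shutdown button press")
signal.pause()  # sleep until a signal arrives; the callback runs on its own thread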
f26e8927f83c3a897d4f474762bca9775467e74e
|
src/helpers/vyos-load-config.py
|
src/helpers/vyos-load-config.py
|
#!/usr/bin/env python3
#
# Copyright (C) 2019 VyOS maintainers and contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 or later as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
"""Load config file from within config session.
Config file specified by URI or path (without scheme prefix).
Example: load https://somewhere.net/some.config
or
load /tmp/some.config
"""
import sys
import tempfile
import vyos.defaults
import vyos.remote
from vyos.config import Config, VyOSError
from vyos.migrator import Migrator, MigratorError
system_config_file = 'config.boot'
class LoadConfig(Config):
"""A subclass for calling 'loadFile'.
This does not belong in config.py, and only has a single caller.
"""
def load_config(self, file_path):
cmd = [self._cli_shell_api, 'loadFile', file_path]
self._run(cmd)
if len(sys.argv) > 1:
file_name = sys.argv[1]
else:
file_name = system_config_file
configdir = vyos.defaults.directories['config']
protocols = ['scp', 'sftp', 'http', 'https', 'ftp', 'tftp']
if any(x in file_name for x in protocols):
config_file = vyos.remote.get_remote_config(file_name)
if not config_file:
sys.exit("No config file by that name.")
else:
canonical_path = '{0}/{1}'.format(configdir, file_name)
try:
with open(canonical_path, 'r') as f:
config_file = f.read()
except OSError as err1:
try:
with open(file_name, 'r') as f:
config_file = f.read()
except OSError as err2:
sys.exit('{0}\n{1}'.format(err1, err2))
config = LoadConfig()
print("Loading configuration from '{}'".format(file_name))
with tempfile.NamedTemporaryFile() as fp:
with open(fp.name, 'w') as fd:
fd.write(config_file)
migration = Migrator(fp.name)
try:
migration.run()
except MigratorError as err:
sys.exit('{}'.format(err))
try:
config.load_config(fp.name)
except VyOSError as err:
sys.exit('{}'.format(err))
if config.session_changed():
print("Load complete. Use 'commit' to make changes effective.")
else:
print("No configuration changes to commit.")
|
Rewrite the config load script
|
T1424: Rewrite the config load script
Rewrite of the load functionality of vyatta-load-config.pl, removing the
dependency on Vyatta::Config.
|
Python
|
lgpl-2.1
|
vyos/vyos-1x,vyos/vyos-1x,vyos/vyos-1x,vyos/vyos-1x
|
T1424: Rewrite the config load script
Rewrite of the load functionality of vyatta-load-config.pl, removing the
dependency on Vyatta::Config.
|
#!/usr/bin/env python3
#
# Copyright (C) 2019 VyOS maintainers and contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 or later as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
"""Load config file from within config session.
Config file specified by URI or path (without scheme prefix).
Example: load https://somewhere.net/some.config
or
load /tmp/some.config
"""
import sys
import tempfile
import vyos.defaults
import vyos.remote
from vyos.config import Config, VyOSError
from vyos.migrator import Migrator, MigratorError
system_config_file = 'config.boot'
class LoadConfig(Config):
"""A subclass for calling 'loadFile'.
This does not belong in config.py, and only has a single caller.
"""
def load_config(self, file_path):
cmd = [self._cli_shell_api, 'loadFile', file_path]
self._run(cmd)
if len(sys.argv) > 1:
file_name = sys.argv[1]
else:
file_name = system_config_file
configdir = vyos.defaults.directories['config']
protocols = ['scp', 'sftp', 'http', 'https', 'ftp', 'tftp']
if any(x in file_name for x in protocols):
config_file = vyos.remote.get_remote_config(file_name)
if not config_file:
sys.exit("No config file by that name.")
else:
canonical_path = '{0}/{1}'.format(configdir, file_name)
try:
with open(canonical_path, 'r') as f:
config_file = f.read()
except OSError as err1:
try:
with open(file_name, 'r') as f:
config_file = f.read()
except OSError as err2:
sys.exit('{0}\n{1}'.format(err1, err2))
config = LoadConfig()
print("Loading configuration from '{}'".format(file_name))
with tempfile.NamedTemporaryFile() as fp:
with open(fp.name, 'w') as fd:
fd.write(config_file)
migration = Migrator(fp.name)
try:
migration.run()
except MigratorError as err:
sys.exit('{}'.format(err))
try:
config.load_config(fp.name)
except VyOSError as err:
sys.exit('{}'.format(err))
if config.session_changed():
print("Load complete. Use 'commit' to make changes effective.")
else:
print("No configuration changes to commit.")
|
<commit_before><commit_msg>T1424: Rewrite the config load script
Rewrite of the load functionality of vyatta-load-config.pl, removing the
dependency on Vyatta::Config.<commit_after>
|
#!/usr/bin/env python3
#
# Copyright (C) 2019 VyOS maintainers and contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 or later as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
"""Load config file from within config session.
Config file specified by URI or path (without scheme prefix).
Example: load https://somewhere.net/some.config
or
load /tmp/some.config
"""
import sys
import tempfile
import vyos.defaults
import vyos.remote
from vyos.config import Config, VyOSError
from vyos.migrator import Migrator, MigratorError
system_config_file = 'config.boot'
class LoadConfig(Config):
"""A subclass for calling 'loadFile'.
This does not belong in config.py, and only has a single caller.
"""
def load_config(self, file_path):
cmd = [self._cli_shell_api, 'loadFile', file_path]
self._run(cmd)
if len(sys.argv) > 1:
file_name = sys.argv[1]
else:
file_name = system_config_file
configdir = vyos.defaults.directories['config']
protocols = ['scp', 'sftp', 'http', 'https', 'ftp', 'tftp']
if any(x in file_name for x in protocols):
config_file = vyos.remote.get_remote_config(file_name)
if not config_file:
sys.exit("No config file by that name.")
else:
canonical_path = '{0}/{1}'.format(configdir, file_name)
try:
with open(canonical_path, 'r') as f:
config_file = f.read()
except OSError as err1:
try:
with open(file_name, 'r') as f:
config_file = f.read()
except OSError as err2:
sys.exit('{0}\n{1}'.format(err1, err2))
config = LoadConfig()
print("Loading configuration from '{}'".format(file_name))
with tempfile.NamedTemporaryFile() as fp:
with open(fp.name, 'w') as fd:
fd.write(config_file)
migration = Migrator(fp.name)
try:
migration.run()
except MigratorError as err:
sys.exit('{}'.format(err))
try:
config.load_config(fp.name)
except VyOSError as err:
sys.exit('{}'.format(err))
if config.session_changed():
print("Load complete. Use 'commit' to make changes effective.")
else:
print("No configuration changes to commit.")
|
T1424: Rewrite the config load script
Rewrite of the load functionality of vyatta-load-config.pl, removing the
dependency on Vyatta::Config.#!/usr/bin/env python3
#
# Copyright (C) 2019 VyOS maintainers and contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 or later as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
"""Load config file from within config session.
Config file specified by URI or path (without scheme prefix).
Example: load https://somewhere.net/some.config
or
load /tmp/some.config
"""
import sys
import tempfile
import vyos.defaults
import vyos.remote
from vyos.config import Config, VyOSError
from vyos.migrator import Migrator, MigratorError
system_config_file = 'config.boot'
class LoadConfig(Config):
"""A subclass for calling 'loadFile'.
This does not belong in config.py, and only has a single caller.
"""
def load_config(self, file_path):
cmd = [self._cli_shell_api, 'loadFile', file_path]
self._run(cmd)
if len(sys.argv) > 1:
file_name = sys.argv[1]
else:
file_name = system_config_file
configdir = vyos.defaults.directories['config']
protocols = ['scp', 'sftp', 'http', 'https', 'ftp', 'tftp']
if any(x in file_name for x in protocols):
config_file = vyos.remote.get_remote_config(file_name)
if not config_file:
sys.exit("No config file by that name.")
else:
canonical_path = '{0}/{1}'.format(configdir, file_name)
try:
with open(canonical_path, 'r') as f:
config_file = f.read()
except OSError as err1:
try:
with open(file_name, 'r') as f:
config_file = f.read()
except OSError as err2:
sys.exit('{0}\n{1}'.format(err1, err2))
config = LoadConfig()
print("Loading configuration from '{}'".format(file_name))
with tempfile.NamedTemporaryFile() as fp:
with open(fp.name, 'w') as fd:
fd.write(config_file)
migration = Migrator(fp.name)
try:
migration.run()
except MigratorError as err:
sys.exit('{}'.format(err))
try:
config.load_config(fp.name)
except VyOSError as err:
sys.exit('{}'.format(err))
if config.session_changed():
print("Load complete. Use 'commit' to make changes effective.")
else:
print("No configuration changes to commit.")
|
<commit_before><commit_msg>T1424: Rewrite the config load script
Rewrite of the load functionality of vyatta-load-config.pl, removing the
dependency on Vyatta::Config.<commit_after>#!/usr/bin/env python3
#
# Copyright (C) 2019 VyOS maintainers and contributors
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 or later as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#
"""Load config file from within config session.
Config file specified by URI or path (without scheme prefix).
Example: load https://somewhere.net/some.config
or
load /tmp/some.config
"""
import sys
import tempfile
import vyos.defaults
import vyos.remote
from vyos.config import Config, VyOSError
from vyos.migrator import Migrator, MigratorError
system_config_file = 'config.boot'
class LoadConfig(Config):
"""A subclass for calling 'loadFile'.
This does not belong in config.py, and only has a single caller.
"""
def load_config(self, file_path):
cmd = [self._cli_shell_api, 'loadFile', file_path]
self._run(cmd)
if len(sys.argv) > 1:
file_name = sys.argv[1]
else:
file_name = system_config_file
configdir = vyos.defaults.directories['config']
protocols = ['scp', 'sftp', 'http', 'https', 'ftp', 'tftp']
if any(x in file_name for x in protocols):
config_file = vyos.remote.get_remote_config(file_name)
if not config_file:
sys.exit("No config file by that name.")
else:
canonical_path = '{0}/{1}'.format(configdir, file_name)
try:
with open(canonical_path, 'r') as f:
config_file = f.read()
except OSError as err1:
try:
with open(file_name, 'r') as f:
config_file = f.read()
except OSError as err2:
sys.exit('{0}\n{1}'.format(err1, err2))
config = LoadConfig()
print("Loading configuration from '{}'".format(file_name))
with tempfile.NamedTemporaryFile() as fp:
with open(fp.name, 'w') as fd:
fd.write(config_file)
migration = Migrator(fp.name)
try:
migration.run()
except MigratorError as err:
sys.exit('{}'.format(err))
try:
config.load_config(fp.name)
except VyOSError as err:
sys.exit('{}'.format(err))
if config.session_changed():
print("Load complete. Use 'commit' to make changes effective.")
else:
print("No configuration changes to commit.")
|
|
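One fragile spot in the script above is the remote-versus-local decision: any(x in file_name for x in protocols) matches substrings, so a local file whose name merely contains "ftp" would be routed to the remote fetcher. A small sketch of the same decision made on the parsed URL scheme instead (the helper name is invented for illustration):

from urllib.parse import urlsplit

REMOTE_SCHEMES = {'scp', 'sftp', 'http', 'https', 'ftp', 'tftp'}

def is_remote(file_name):
    """True only when file_name starts with a genuine remote URL scheme."""
    return urlsplit(file_name).scheme in REMOTE_SCHEMES

if __name__ == '__main__':
    assert is_remote('https://somewhere.net/some.config')
    assert not is_remote('/tmp/ftp-backup.config')  # substring matching gets this wrong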
218848f916ffe55bed2e914ee5aa7be459d0ff4a
|
openstack/tests/functional/telemetry/v2/test_alarm.py
|
openstack/tests/functional/telemetry/v2/test_alarm.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.telemetry.v2 import alarm
from openstack.tests.functional import base
class TestAlarm(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestAlarm, cls).setUpClass()
meter = next(cls.conn.telemetry.meters())
sot = cls.conn.telemetry.create_alarm(
name=cls.NAME,
type='threshold',
threshold_rule={
'meter_name': meter.name,
'threshold': 1.1,
},
)
assert isinstance(sot, alarm.Alarm)
        assert sot.name == cls.NAME
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.telemetry.delete_alarm(cls.ID, ignore_missing=False)
        assert sot is None
def test_get(self):
sot = self.conn.telemetry.get_alarm(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.telemetry.alarms()]
self.assertIn(self.NAME, names)
|
Add functional tests for telemetry alarm crud
|
Add functional tests for telemetry alarm crud
Change-Id: I77399eb47761c8de04095c06cd67d5d09e644df4
|
Python
|
apache-2.0
|
briancurtin/python-openstacksdk,stackforge/python-openstacksdk,dtroyer/python-openstacksdk,briancurtin/python-openstacksdk,mtougeron/python-openstacksdk,openstack/python-openstacksdk,dtroyer/python-openstacksdk,dudymas/python-openstacksdk,dudymas/python-openstacksdk,stackforge/python-openstacksdk,openstack/python-openstacksdk,mtougeron/python-openstacksdk
|
Add functional tests for telemetry alarm crud
Change-Id: I77399eb47761c8de04095c06cd67d5d09e644df4
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.telemetry.v2 import alarm
from openstack.tests.functional import base
class TestAlarm(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestAlarm, cls).setUpClass()
meter = next(cls.conn.telemetry.meters())
sot = cls.conn.telemetry.create_alarm(
name=cls.NAME,
type='threshold',
threshold_rule={
'meter_name': meter.name,
'threshold': 1.1,
},
)
assert isinstance(sot, alarm.Alarm)
        assert sot.name == cls.NAME
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.telemetry.delete_alarm(cls.ID, ignore_missing=False)
        assert sot is None
def test_get(self):
sot = self.conn.telemetry.get_alarm(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.telemetry.alarms()]
self.assertIn(self.NAME, names)
|
<commit_before><commit_msg>Add functional tests for telemetry alarm crud
Change-Id: I77399eb47761c8de04095c06cd67d5d09e644df4<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.telemetry.v2 import alarm
from openstack.tests.functional import base
class TestAlarm(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestAlarm, cls).setUpClass()
meter = next(cls.conn.telemetry.meters())
sot = cls.conn.telemetry.create_alarm(
name=cls.NAME,
type='threshold',
threshold_rule={
'meter_name': meter.name,
'threshold': 1.1,
},
)
assert isinstance(sot, alarm.Alarm)
        assert sot.name == cls.NAME
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.telemetry.delete_alarm(cls.ID, ignore_missing=False)
        assert sot is None
def test_get(self):
sot = self.conn.telemetry.get_alarm(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.telemetry.alarms()]
self.assertIn(self.NAME, names)
|
Add functional tests for telemetry alarm crud
Change-Id: I77399eb47761c8de04095c06cd67d5d09e644df4# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.telemetry.v2 import alarm
from openstack.tests.functional import base
class TestAlarm(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestAlarm, cls).setUpClass()
meter = next(cls.conn.telemetry.meters())
sot = cls.conn.telemetry.create_alarm(
name=cls.NAME,
type='threshold',
threshold_rule={
'meter_name': meter.name,
'threshold': 1.1,
},
)
assert isinstance(sot, alarm.Alarm)
        assert sot.name == cls.NAME
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.telemetry.delete_alarm(cls.ID, ignore_missing=False)
        assert sot is None
def test_get(self):
sot = self.conn.telemetry.get_alarm(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.telemetry.alarms()]
self.assertIn(self.NAME, names)
|
<commit_before><commit_msg>Add functional tests for telemetry alarm crud
Change-Id: I77399eb47761c8de04095c06cd67d5d09e644df4<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import uuid
from openstack.telemetry.v2 import alarm
from openstack.tests.functional import base
class TestAlarm(base.BaseFunctionalTest):
NAME = uuid.uuid4().hex
ID = None
@classmethod
def setUpClass(cls):
super(TestAlarm, cls).setUpClass()
meter = next(cls.conn.telemetry.meters())
sot = cls.conn.telemetry.create_alarm(
name=cls.NAME,
type='threshold',
threshold_rule={
'meter_name': meter.name,
'threshold': 1.1,
},
)
assert isinstance(sot, alarm.Alarm)
        assert sot.name == cls.NAME
cls.ID = sot.id
@classmethod
def tearDownClass(cls):
sot = cls.conn.telemetry.delete_alarm(cls.ID, ignore_missing=False)
        assert sot is None
def test_get(self):
sot = self.conn.telemetry.get_alarm(self.ID)
self.assertEqual(self.NAME, sot.name)
self.assertEqual(self.ID, sot.id)
def test_list(self):
names = [o.name for o in self.conn.telemetry.alarms()]
self.assertIn(self.NAME, names)
|
|
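unittest assertion helpers such as assertIs are instance methods, which is why the classmethod fixtures above fall back to plain assert statements; the self.assert* helpers only become usable inside the test methods themselves. A self-contained sketch of that fixture shape, with the OpenStack connection replaced by a hypothetical in-memory stub:

import unittest
import uuid

class FakeTelemetry:
    """Hypothetical stand-in for conn.telemetry, just enough for this sketch."""
    def __init__(self):
        self._alarms = {}
    def create_alarm(self, name):
        alarm_id = uuid.uuid4().hex
        self._alarms[alarm_id] = name
        return alarm_id
    def delete_alarm(self, alarm_id):
        del self._alarms[alarm_id]

class TestAlarmFixture(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        cls.telemetry = FakeTelemetry()
        cls.NAME = uuid.uuid4().hex
        cls.ID = cls.telemetry.create_alarm(cls.NAME)
        assert cls.ID in cls.telemetry._alarms  # plain assert: no instance exists yet
    @classmethod
    def tearDownClass(cls):
        cls.telemetry.delete_alarm(cls.ID)
    def test_name_round_trips(self):
        # Inside a test method the instance helpers are available again.
        self.assertEqual(self.telemetry._alarms[self.ID], self.NAME)

if __name__ == '__main__':
    unittest.main()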
02664bd5fbd1581bc4817a942f690a5e6a5b9929
|
share/render-template.py
|
share/render-template.py
|
#!/usr/bin/env python
from jinja2 import Environment, FileSystemLoader
import yaml
import os
import sys
def main():
# Check command line
if len(sys.argv) != 2:
print("Usage: render-template.py device")
sys.exit(1)
device = sys.argv[1]
env = Environment(loader=FileSystemLoader(
[os.path.join(os.path.dirname(__file__), 'jinja2/devices'),
os.path.join(os.path.dirname(__file__), 'jinja2/device_types')]),
trim_blocks=True)
template = env.get_template("%s.yaml" % device)
ctx = {}
config = template.render(**ctx)
print "YAML config"
print "==========="
print config
print "Parsed config"
print "============="
print yaml.load(config)
if __name__ == '__main__':
main()
|
Add an helper to render device conf templates
|
Add an helper to render device conf templates
Change-Id: Ida10d2963969b12aeb04ef034fa7814ffaf2c8ed
|
Python
|
agpl-3.0
|
Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server
|
Add an helper to render device conf templates
Change-Id: Ida10d2963969b12aeb04ef034fa7814ffaf2c8ed
|
#!/usr/bin/env python
from jinja2 import Environment, FileSystemLoader
import yaml
import os
import sys
def main():
# Check command line
if len(sys.argv) != 2:
print("Usage: render-template.py device")
sys.exit(1)
device = sys.argv[1]
env = Environment(loader=FileSystemLoader(
[os.path.join(os.path.dirname(__file__), 'jinja2/devices'),
os.path.join(os.path.dirname(__file__), 'jinja2/device_types')]),
trim_blocks=True)
template = env.get_template("%s.yaml" % device)
ctx = {}
config = template.render(**ctx)
print "YAML config"
print "==========="
print config
print "Parsed config"
print "============="
print yaml.load(config)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add an helper to render device conf templates
Change-Id: Ida10d2963969b12aeb04ef034fa7814ffaf2c8ed<commit_after>
|
#!/usr/bin/env python
from jinja2 import Environment, FileSystemLoader
import yaml
import os
import sys
def main():
# Check command line
if len(sys.argv) != 2:
print("Usage: render-template.py device")
sys.exit(1)
device = sys.argv[1]
env = Environment(loader=FileSystemLoader(
[os.path.join(os.path.dirname(__file__), 'jinja2/devices'),
os.path.join(os.path.dirname(__file__), 'jinja2/device_types')]),
trim_blocks=True)
template = env.get_template("%s.yaml" % device)
ctx = {}
config = template.render(**ctx)
print "YAML config"
print "==========="
print config
print "Parsed config"
print "============="
print yaml.load(config)
if __name__ == '__main__':
main()
|
Add an helper to render device conf templates
Change-Id: Ida10d2963969b12aeb04ef034fa7814ffaf2c8ed#!/usr/bin/env python
from jinja2 import Environment, FileSystemLoader
import yaml
import os
import sys
def main():
# Check command line
if len(sys.argv) != 2:
print("Usage: render-template.py device")
sys.exit(1)
device = sys.argv[1]
env = Environment(loader=FileSystemLoader(
[os.path.join(os.path.dirname(__file__), 'jinja2/devices'),
os.path.join(os.path.dirname(__file__), 'jinja2/device_types')]),
trim_blocks=True)
template = env.get_template("%s.yaml" % device)
ctx = {}
config = template.render(**ctx)
print "YAML config"
print "==========="
print config
print "Parsed config"
print "============="
print yaml.load(config)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add an helper to render device conf templates
Change-Id: Ida10d2963969b12aeb04ef034fa7814ffaf2c8ed<commit_after>#!/usr/bin/env python
from jinja2 import Environment, FileSystemLoader
import yaml
import os
import sys
def main():
# Check command line
if len(sys.argv) != 2:
print("Usage: render-template.py device")
sys.exit(1)
device = sys.argv[1]
env = Environment(loader=FileSystemLoader(
[os.path.join(os.path.dirname(__file__), 'jinja2/devices'),
os.path.join(os.path.dirname(__file__), 'jinja2/device_types')]),
trim_blocks=True)
template = env.get_template("%s.yaml" % device)
ctx = {}
config = template.render(**ctx)
print "YAML config"
print "==========="
print config
print "Parsed config"
print "============="
print yaml.load(config)
if __name__ == '__main__':
main()
|
|
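The helper above parses the rendered template with yaml.load, which on modern PyYAML either warns or, with the full loader, can construct arbitrary Python objects from a hostile template. A Python 3 sketch of the same render-then-parse step using safe_load; the template directory layout is assumed to match the script above:

import sys
import yaml
from jinja2 import Environment, FileSystemLoader

def render_device(device, template_dirs=('jinja2/devices', 'jinja2/device_types')):
    env = Environment(loader=FileSystemLoader(list(template_dirs)), trim_blocks=True)
    config = env.get_template('%s.yaml' % device).render()
    # safe_load builds only plain Python types (dicts, lists, scalars).
    return yaml.safe_load(config)

if __name__ == '__main__':
    print(render_device(sys.argv[1]))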
bc1faaa6265f70254702709efb7f8410fbe07286
|
alexa-british-problems.py
|
alexa-british-problems.py
|
import json
import time
import requests
import unidecode
from flask import Flask
from flask_ask import Ask, question, session, statement
APP = Flask(__name__)
ASK = Ask(APP, "/british_problems")
def get_british_problems():
"""Get the titles of the /r/britishproblems posts"""
user_pass_dict = {'user': 'alexabritishproblems',
'passwd': '83O9ls8eC77lmO%3@rw&',
'api_type': 'json'}
sess = requests.Session()
sess.headers.update(
{'User-Agent': 'alexa:british_problems:0.1 ' +
'(by /u/alexabritishproblems)'})
    sess.post('https://www.reddit.com/api/login', data=user_pass_dict)
time.sleep(1)
url = "https://reddit.com/r/britishproblems/.json?limit=10"
html = sess.get(url)
data = json.loads(html.content.decode('utf-8'))
titles = [unidecode.unidecode(listing['data']['title'])
for listing in data['data']['children']]
return titles
titles = get_british_problems()
print(titles)
@APP.route('/')
def homepage():
"""Flask default routing"""
return 'greetz fren'
@ASK.launch
def start_skill():
"""Entry point for the alexa skill"""
welcome_message = 'Hello there, would you like to hear a very British problem?'
return question(welcome_message)
@ASK.intent("GetNewBritishProblem")
def handle_get_problem_intent():
"""Handles the intent for getting a new british problem and outputting it to Alexa"""
british_problem = get_british_problems()
return statement(british_problem)
@ASK.intent("NoIntent")
def handle_no_intent():
"""Handles an unmatched intent"""
goodbye_message = 'See you later... bye.'
return statement(goodbye_message)
if __name__ == '__main__':
APP.run(debug=True)
|
Add alexa skill basic functionality
|
Add alexa skill basic functionality
|
Python
|
mit
|
Sorsby/alexa-british-problems
|
Add alexa skill basic functionality
|
import json
import time
import requests
import unidecode
from flask import Flask
from flask_ask import Ask, question, session, statement
APP = Flask(__name__)
ASK = Ask(APP, "/british_problems")
def get_british_problems():
"""Get the titles of the /r/britishproblems posts"""
user_pass_dict = {'user': 'alexabritishproblems',
'passwd': '83O9ls8eC77lmO%3@rw&',
'api_type': 'json'}
sess = requests.Session()
sess.headers.update(
{'User-Agent': 'alexa:british_problems:0.1 ' +
'(by /u/alexabritishproblems)'})
    sess.post('https://www.reddit.com/api/login', data=user_pass_dict)
time.sleep(1)
url = "https://reddit.com/r/britishproblems/.json?limit=10"
html = sess.get(url)
data = json.loads(html.content.decode('utf-8'))
titles = [unidecode.unidecode(listing['data']['title'])
for listing in data['data']['children']]
return titles
titles = get_british_problems()
print(titles)
@APP.route('/')
def homepage():
"""Flask default routing"""
return 'greetz fren'
@ASK.launch
def start_skill():
"""Entry point for the alexa skill"""
welcome_message = 'Hello there, would you like to hear a very British problem?'
return question(welcome_message)
@ASK.intent("GetNewBritishProblem")
def handle_get_problem_intent():
"""Handles the intent for getting a new british problem and outputting it to Alexa"""
british_problem = get_british_problems()
return statement(british_problem)
@ASK.intent("NoIntent")
def handle_no_intent():
"""Handles an unmatched intent"""
goodbye_message = 'See you later... bye.'
return statement(goodbye_message)
if __name__ == '__main__':
APP.run(debug=True)
|
<commit_before><commit_msg>Add alexa skill basic functionality<commit_after>
|
import json
import time
import requests
import unidecode
from flask import Flask
from flask_ask import Ask, question, session, statement
APP = Flask(__name__)
ASK = Ask(APP, "/british_problems")
def get_british_problems():
"""Get the titles of the /r/britishproblems posts"""
user_pass_dict = {'user': 'alexabritishproblems',
'passwd': '83O9ls8eC77lmO%3@rw&',
'api_type': 'json'}
sess = requests.Session()
sess.headers.update(
{'User-Agent': 'alexa:british_problems:0.1 ' +
'(by /u/alexabritishproblems)'})
    sess.post('https://www.reddit.com/api/login', data=user_pass_dict)
time.sleep(1)
url = "https://reddit.com/r/britishproblems/.json?limit=10"
html = sess.get(url)
data = json.loads(html.content.decode('utf-8'))
titles = [unidecode.unidecode(listing['data']['title'])
for listing in data['data']['children']]
return titles
titles = get_british_problems()
print(titles)
@APP.route('/')
def homepage():
"""Flask default routing"""
return 'greetz fren'
@ASK.launch
def start_skill():
"""Entry point for the alexa skill"""
welcome_message = 'Hello there, would you like to hear a very British problem?'
return question(welcome_message)
@ASK.intent("GetNewBritishProblem")
def handle_get_problem_intent():
"""Handles the intent for getting a new british problem and outputting it to Alexa"""
british_problem = get_british_problems()
return statement(british_problem)
@ASK.intent("NoIntent")
def handle_no_intent():
"""Handles an unmatched intent"""
goodbye_message = 'See you later... bye.'
return statement(goodbye_message)
if __name__ == '__main__':
APP.run(debug=True)
|
Add alexa skill basic functionalityimport json
import time
import requests
import unidecode
from flask import Flask
from flask_ask import Ask, question, session, statement
APP = Flask(__name__)
ASK = Ask(APP, "/british_problems")
def get_british_problems():
"""Get the titles of the /r/britishproblems posts"""
user_pass_dict = {'user': 'alexabritishproblems',
'passwd': '83O9ls8eC77lmO%3@rw&',
'api_type': 'json'}
sess = requests.Session()
sess.headers.update(
{'User-Agent': 'alexa:british_problems:0.1 ' +
'(by /u/alexabritishproblems)'})
    sess.post('https://www.reddit.com/api/login', data=user_pass_dict)
time.sleep(1)
url = "https://reddit.com/r/britishproblems/.json?limit=10"
html = sess.get(url)
data = json.loads(html.content.decode('utf-8'))
titles = [unidecode.unidecode(listing['data']['title'])
for listing in data['data']['children']]
return titles
titles = get_british_problems()
print(titles)
@APP.route('/')
def homepage():
"""Flask default routing"""
return 'greetz fren'
@ASK.launch
def start_skill():
"""Entry point for the alexa skill"""
welcome_message = 'Hello there, would you like to hear a very British problem?'
return question(welcome_message)
@ASK.intent("GetNewBritishProblem")
def handle_get_problem_intent():
"""Handles the intent for getting a new british problem and outputting it to Alexa"""
british_problem = get_british_problems()
return statement(british_problem)
@ASK.intent("NoIntent")
def handle_no_intent():
"""Handles an unmatched intent"""
goodbye_message = 'See you later... bye.'
return statement(goodbye_message)
if __name__ == '__main__':
APP.run(debug=True)
|
<commit_before><commit_msg>Add alexa skill basic functionality<commit_after>import json
import time
import requests
import unidecode
from flask import Flask
from flask_ask import Ask, question, session, statement
APP = Flask(__name__)
ASK = Ask(APP, "/british_problems")
def get_british_problems():
"""Get the titles of the /r/britishproblems posts"""
user_pass_dict = {'user': 'alexabritishproblems',
'passwd': '83O9ls8eC77lmO%3@rw&',
'api_type': 'json'}
sess = requests.Session()
sess.headers.update(
{'User-Agent': 'alexa:british_problems:0.1 ' +
'(by /u/alexabritishproblems)'})
    sess.post('https://www.reddit.com/api/login', data=user_pass_dict)
time.sleep(1)
url = "https://reddit.com/r/britishproblems/.json?limit=10"
html = sess.get(url)
data = json.loads(html.content.decode('utf-8'))
titles = [unidecode.unidecode(listing['data']['title'])
for listing in data['data']['children']]
return titles
titles = get_british_problems()
print(titles)
@APP.route('/')
def homepage():
"""Flask default routing"""
return 'greetz fren'
@ASK.launch
def start_skill():
"""Entry point for the alexa skill"""
welcome_message = 'Hello there, would you like to hear a very British problem?'
return question(welcome_message)
@ASK.intent("GetNewBritishProblem")
def handle_get_problem_intent():
"""Handles the intent for getting a new british problem and outputting it to Alexa"""
british_problem = get_british_problems()
return statement(british_problem)
@ASK.intent("NoIntent")
def handle_no_intent():
"""Handles an unmatched intent"""
goodbye_message = 'See you later... bye.'
return statement(goodbye_message)
if __name__ == '__main__':
APP.run(debug=True)
|
|
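Two things in the skill above are worth noting: credentials are hardcoded into the source, and get_british_problems() returns a whole list of titles even though statement() wants a single string to speak. Read-only subreddit listings do not require a login at all, so a hedged sketch of the fetch that returns one random title (same User-Agent convention as above; network access assumed):

import random
import requests

HEADERS = {'User-Agent': 'alexa:british_problems:0.1 (by /u/alexabritishproblems)'}

def get_random_british_problem(limit=10):
    url = 'https://www.reddit.com/r/britishproblems/.json?limit=%d' % limit
    data = requests.get(url, headers=HEADERS, timeout=10).json()
    titles = [child['data']['title'] for child in data['data']['children']]
    return random.choice(titles)  # one spoken-sized string, not the whole list

if __name__ == '__main__':
    print(get_random_british_problem())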
38a07f395ffa6bfe20cf4c205c8b4cf30411fab5
|
evaluation/packages/orderedSet.py
|
evaluation/packages/orderedSet.py
|
"""
Code from http://code.activestate.com/recipes/576694/
"""
import collections
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
if __name__ == '__main__':
s = OrderedSet('abracadaba')
t = OrderedSet('simsalabim')
print(s | t)
print(s & t)
print(s - t)
|
Add new ordered set class
|
Add new ordered set class
|
Python
|
apache-2.0
|
amonszpart/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,amonszpart/globOpt,amonszpart/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,NUAAXXY/globOpt
|
Add new ordered set class
|
"""
Code from http://code.activestate.com/recipes/576694/
"""
import collections
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
if __name__ == '__main__':
s = OrderedSet('abracadaba')
t = OrderedSet('simsalabim')
print(s | t)
print(s & t)
print(s - t)
|
<commit_before><commit_msg>Add new ordered set class<commit_after>
|
"""
Code from http://code.activestate.com/recipes/576694/
"""
import collections
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
if __name__ == '__main__':
s = OrderedSet('abracadaba')
t = OrderedSet('simsalabim')
print(s | t)
print(s & t)
print(s - t)
|
Add new ordered set class"""
Code from http://code.activestate.com/recipes/576694/
"""
import collections
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
if __name__ == '__main__':
s = OrderedSet('abracadaba')
t = OrderedSet('simsalabim')
print(s | t)
print(s & t)
print(s - t)
|
<commit_before><commit_msg>Add new ordered set class<commit_after>"""
Code from http://code.activestate.com/recipes/576694/
"""
import collections
class OrderedSet(collections.MutableSet):
def __init__(self, iterable=None):
self.end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.map = {} # key --> [key, prev, next]
if iterable is not None:
self |= iterable
def __len__(self):
return len(self.map)
def __contains__(self, key):
return key in self.map
def add(self, key):
if key not in self.map:
end = self.end
curr = end[1]
curr[2] = end[1] = self.map[key] = [key, curr, end]
def discard(self, key):
if key in self.map:
key, prev, next = self.map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def pop(self, last=True):
if not self:
raise KeyError('set is empty')
key = self.end[1][0] if last else self.end[2][0]
self.discard(key)
return key
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, list(self))
def __eq__(self, other):
if isinstance(other, OrderedSet):
return len(self) == len(other) and list(self) == list(other)
return set(self) == set(other)
if __name__ == '__main__':
s = OrderedSet('abracadaba')
t = OrderedSet('simsalabim')
print(s | t)
print(s & t)
print(s - t)
|
|
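The recipe above predates two relevant language changes: collections.MutableSet must now be imported from collections.abc (the top-level alias was removed in Python 3.10), and since CPython 3.7 plain dicts preserve insertion order, which makes a much smaller ordered set possible. A sketch of that dict-backed approach:

from collections.abc import MutableSet

class DictOrderedSet(MutableSet):
    """Insertion-ordered set backed by a dict (ordered since Python 3.7)."""
    def __init__(self, iterable=()):
        self._d = dict.fromkeys(iterable)
    def __contains__(self, key):
        return key in self._d
    def __iter__(self):
        return iter(self._d)
    def __len__(self):
        return len(self._d)
    def add(self, key):
        self._d[key] = None
    def discard(self, key):
        self._d.pop(key, None)
    def __repr__(self):
        return '%s(%r)' % (type(self).__name__, list(self))

if __name__ == '__main__':
    s = DictOrderedSet('abracadaba')
    t = DictOrderedSet('simsalabim')
    print(s | t)  # set operators come free from the MutableSet mixins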
9a36a91a9fff8b6ce0deaf8c1aee259e2abc911e
|
examples/multiline_echo_server.py
|
examples/multiline_echo_server.py
|
#!/usr/bin/env python
from gaidaros import *
server = Gaidaros(end_request = lambda x: '\n\n' in x, split_request = lambda x: (x[:x.find('\n\n') + 1], x[x.find('\n\n') + 1:]))
server.serve()
|
Add multiline echo server example script
|
Add multiline echo server example script
|
Python
|
mit
|
rowanthorpe/gaidaros
|
Add multiline echo server example script
|
#!/usr/bin/env python
from gaidaros import *
server = Gaidaros(end_request = lambda x: '\n\n' in x, split_request = lambda x: (x[:x.find('\n\n') + 1], x[x.find('\n\n') + 1:]))
server.serve()
|
<commit_before><commit_msg>Add multiline echo server example script<commit_after>
|
#!/usr/bin/env python
from gaidaros import *
server = Gaidaros(end_request = lambda x: '\n\n' in x, split_request = lambda x: (x[:x.find('\n\n') + 1], x[x.find('\n\n') + 1:]))
server.serve()
|
Add multiline echo server example script#!/usr/bin/env python
from gaidaros import *
server = Gaidaros(end_request = lambda x: '\n\n' in x, split_request = lambda x: (x[:x.find('\n\n') + 1], x[x.find('\n\n') + 1:]))
server.serve()
|
<commit_before><commit_msg>Add multiline echo server example script<commit_after>#!/usr/bin/env python
from gaidaros import *
server = Gaidaros(end_request = lambda x: '\n\n' in x, split_request = lambda x: (x[:x.find('\n\n') + 1], x[x.find('\n\n') + 1:]))
server.serve()
|
|
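A lambda body must be a single expression, so the original one-liner's "lambda x: return (...)" form was a SyntaxError; dropping the return, as above, is the whole fix. The two callbacks are easy to sanity-check on their own, without starting the server:

end_request = lambda x: '\n\n' in x
split_request = lambda x: (x[:x.find('\n\n') + 1], x[x.find('\n\n') + 1:])

if __name__ == '__main__':
    buf = 'line one\nline two\n\nleftover'
    assert end_request(buf)
    head, rest = split_request(buf)
    assert head == 'line one\nline two\n'  # request up to and including the first newline
    assert rest == '\nleftover'            # remainder stays buffered for the next request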
9ae8dcb6eca400e4d58a652a0d949087f321c307
|
python/misc/fizzbuzz.py
|
python/misc/fizzbuzz.py
|
#!/usr/bin/env python
# pylint: disable=C0111
# pylint: disable=C0103
# pylint: disable=C0330
MAX = 31
def simple():
print ('--- SIMPLE -----')
for i in range(1, MAX):
output = ''
if i % 3 == 0:
output += 'Fizz'
if i % 5 == 0:
output += 'Buzz'
if not output:
output += str(i)
print(output)
print ('--------------------------')
def parametrized(mapping):
print ('--- PARAMETRIZED -----')
for i in range(1, MAX):
output = ''
for k, v in mapping.items():
if i % k == 0:
output += v
print(output or str(i))
print ('--------------------------')
def no_if(mapping):
## TODO find a way to kill all conditions
print ('--- NO IF #1 -----')
def getmod(x, m):
return x % m + m
def mapmod(x):
for k, v in mapping.items():
yield mapping.get(getmod(x, k), x)
#results = map(lambda x: getmod(x), range(0, MAX))
#results = map(lambda x: mapmod(x), range(1, MAX))
for x in range(1, MAX):
for p in mapmod(x):
print (p)
#print (*results, sep='\n')
print ('--------------------------')
### Main ###
if __name__ == "__main__":
simple()
parametrized({3: 'Fizz', 5: 'Buzz'})
no_if({3: 'Fizz', 5: 'Buzz'})
|
Add a few fizz buzz attempts
|
Add a few fizz buzz attempts
|
Python
|
mit
|
petarov/sandbox,petarov/sandbox,petarov/sandbox,petarov/sandbox,petarov/sandbox,petarov/sandbox,petarov/sandbox,petarov/sandbox,petarov/sandbox
|
Add a few fizz buzz attempts
|
#!/usr/bin/env python
# pylint: disable=C0111
# pylint: disable=C0103
# pylint: disable=C0330
MAX = 31
def simple():
print ('--- SIMPLE -----')
for i in range(1, MAX):
output = ''
if i % 3 == 0:
output += 'Fizz'
if i % 5 == 0:
output += 'Buzz'
if not output:
output += str(i)
print(output)
print ('--------------------------')
def parametrized(mapping):
print ('--- PARAMETRIZED -----')
for i in range(1, MAX):
output = ''
for k, v in mapping.items():
if i % k == 0:
output += v
print(output or str(i))
print ('--------------------------')
def no_if(mapping):
## TODO find a way to kill all conditions
print ('--- NO IF #1 -----')
def getmod(x, m):
return x % m + m
def mapmod(x):
for k, v in mapping.items():
yield mapping.get(getmod(x, k), x)
#results = map(lambda x: getmod(x), range(0, MAX))
#results = map(lambda x: mapmod(x), range(1, MAX))
for x in range(1, MAX):
for p in mapmod(x):
print (p)
#print (*results, sep='\n')
print ('--------------------------')
### Main ###
if __name__ == "__main__":
simple()
parametrized({3: 'Fizz', 5: 'Buzz'})
no_if({3: 'Fizz', 5: 'Buzz'})
|
<commit_before><commit_msg>Add a few fizz buzz attempts<commit_after>
|
#!/usr/bin/env python
# pylint: disable=C0111
# pylint: disable=C0103
# pylint: disable=C0330
MAX = 31
def simple():
print ('--- SIMPLE -----')
for i in range(1, MAX):
output = ''
if i % 3 == 0:
output += 'Fizz'
if i % 5 == 0:
output += 'Buzz'
if not output:
output += str(i)
print(output)
print ('--------------------------')
def parametrized(mapping):
print ('--- PARAMETRIZED -----')
for i in range(1, MAX):
output = ''
for k, v in mapping.items():
if i % k == 0:
output += v
print(output or str(i))
print ('--------------------------')
def no_if(mapping):
## TODO find a way to kill all conditions
print ('--- NO IF #1 -----')
def getmod(x, m):
return x % m + m
def mapmod(x):
for k, v in mapping.items():
yield mapping.get(getmod(x, k), x)
#results = map(lambda x: getmod(x), range(0, MAX))
#results = map(lambda x: mapmod(x), range(1, MAX))
for x in range(1, MAX):
for p in mapmod(x):
print (p)
#print (*results, sep='\n')
print ('--------------------------')
### Main ###
if __name__ == "__main__":
simple()
parametrized({3: 'Fizz', 5: 'Buzz'})
no_if({3: 'Fizz', 5: 'Buzz'})
|
Add a few fizz buzz attempts#!/usr/bin/env python
# pylint: disable=C0111
# pylint: disable=C0103
# pylint: disable=C0330
MAX = 31
def simple():
print ('--- SIMPLE -----')
for i in range(1, MAX):
output = ''
if i % 3 == 0:
output += 'Fizz'
if i % 5 == 0:
output += 'Buzz'
if not output:
output += str(i)
print(output)
print ('--------------------------')
def parametrized(mapping):
print ('--- PARAMETRIZED -----')
for i in range(1, MAX):
output = ''
for k, v in mapping.items():
if i % k == 0:
output += v
print(output or str(i))
print ('--------------------------')
def no_if(mapping):
## TODO find a way to kill all conditions
print ('--- NO IF #1 -----')
def getmod(x, m):
return x % m + m
def mapmod(x):
for k, v in mapping.items():
yield mapping.get(getmod(x, k), x)
#results = map(lambda x: getmod(x), range(0, MAX))
#results = map(lambda x: mapmod(x), range(1, MAX))
for x in range(1, MAX):
for p in mapmod(x):
print (p)
#print (*results, sep='\n')
print ('--------------------------')
### Main ###
if __name__ == "__main__":
simple()
parametrized({3: 'Fizz', 5: 'Buzz'})
no_if({3: 'Fizz', 5: 'Buzz'})
|
<commit_before><commit_msg>Add a few fizz buzz attempts<commit_after>#!/usr/bin/env python
# pylint: disable=C0111
# pylint: disable=C0103
# pylint: disable=C0330
MAX = 31
def simple():
print ('--- SIMPLE -----')
for i in range(1, MAX):
output = ''
if i % 3 == 0:
output += 'Fizz'
if i % 5 == 0:
output += 'Buzz'
if not output:
output += str(i)
print(output)
print ('--------------------------')
def parametrized(mapping):
print ('--- PARAMETRIZED -----')
for i in range(1, MAX):
output = ''
for k, v in mapping.items():
if i % k == 0:
output += v
print(output or str(i))
print ('--------------------------')
def no_if(mapping):
## TODO find a way to kill all conditions
print ('--- NO IF #1 -----')
def getmod(x, m):
return x % m + m
def mapmod(x):
for k, v in mapping.items():
yield mapping.get(getmod(x, k), x)
#results = map(lambda x: getmod(x), range(0, MAX))
#results = map(lambda x: mapmod(x), range(1, MAX))
for x in range(1, MAX):
for p in mapmod(x):
print (p)
#print (*results, sep='\n')
print ('--------------------------')
### Main ###
if __name__ == "__main__":
simple()
parametrized({3: 'Fizz', 5: 'Buzz'})
no_if({3: 'Fizz', 5: 'Buzz'})
|
|
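The no_if attempt above still loops over the mapping and prints once per entry. The classic condition-free formulation leans on the fact that bool is an int, so multiplying a string by a comparison keeps or drops it, and a final "or" falls back to the number. A short Python 3 sketch:

MAX = 31

def fizzbuzz_no_if(mapping):
    for i in range(1, MAX):
        # 'Fizz' * (i % 3 == 0) is '' unless i is a multiple of 3, and so on;
        # sorting by divisor keeps Fizz ahead of Buzz.
        words = ''.join(word * (i % k == 0) for k, word in sorted(mapping.items()))
        print(words or i)

if __name__ == '__main__':
    fizzbuzz_no_if({3: 'Fizz', 5: 'Buzz'})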
9cd5226f64b93d19f6af7cfd76436118b00bd781
|
example/amex.py
|
example/amex.py
|
# Example on how to use the ofxclient library without the
# web interface. It's admittedly clunky but doable -- it's
# free so get over it >=)
import ofxclient
from pprint import pprint
# http://www.ofxhome.com/index.php/institution/view/424
# note this is NOT the FI Id. It's the ofxhome ID.
ofxhome_id = '424'
your_username = 'genewilder'
your_password = 'ihatecandy'
# yeah I know, you can't pass the 'pass' in
# the constructor.. I'm lame and maybe I'll fix
# it later
institution = ofxclient.Institution(
id = ofxhome_id,
username = your_username
)
institution.password = your_password
# You HAVE to call save() but only just once. Calling save
# repeatedly won't hurt anything.
# Note that after calling this, you would never need to specify the
# institution.password again as it will be loaded from the keychain
#
# save() triggers saving of cache information (see ~/.ofxclient) as well
# as a config file (see ~/.ofxclient.conf)
institution.save()
accounts = institution.accounts()
# returns an ofxparse.Statement object
# see the ofx.account.statement portion of their docs:
# https://github.com/jseutter/ofxparse/blob/master/README
statement = accounts[0].statement(days=5)
# get the balance
print "balance: %s" % statement.balance
# and get the transactions too if you want
pprint(statement.transactions)
|
Add an example on the library usage
|
Add an example on the library usage
|
Python
|
mit
|
jbms/ofxclient,captin411/ofxclient
|
Add an example on the library usage
|
# Example on how to use the ofxclient library without the
# web interface. It's admittedly clunky but doable -- it's
# free so get over it >=)
import ofxclient
from pprint import pprint
# http://www.ofxhome.com/index.php/institution/view/424
# note this is NOT the FI Id. It's the ofxhome ID.
ofxhome_id = '424'
your_username = 'genewilder'
your_password = 'ihatecandy'
# yeah I know, you can't pass the 'pass' in
# the constructor.. I'm lame and maybe I'll fix
# it later
institution = ofxclient.Institution(
id = ofxhome_id,
username = your_username
)
institution.password = your_password
# You HAVE to call save() but only just once. Calling save
# repeatedly won't hurt anything.
# Note that after calling this, you would never need to specify the
# institution.password again as it will be loaded from the keychain
#
# save() triggers saving of cache information (see ~/.ofxclient) as well
# as a config file (see ~/.ofxclient.conf)
institution.save()
accounts = institution.accounts()
# returns an ofxparse.Statement object
# see the ofx.account.statement portion of their docs:
# https://github.com/jseutter/ofxparse/blob/master/README
statement = accounts[0].statement(days=5)
# get the balance
print "balance: %s" % statement.balance
# and get the transactions too if you want
pprint(statement.transactions)
|
<commit_before><commit_msg>Add an example on the library usage<commit_after>
|
# Example on how to use the ofxclient library without the
# web interface. It's admittedly clunky but doable -- it's
# free so get over it >=)
import ofxclient
from pprint import pprint
# http://www.ofxhome.com/index.php/institution/view/424
# note this is NOT the FI Id. It's the ofxhome ID.
ofxhome_id = '424'
your_username = 'genewilder'
your_password = 'ihatecandy'
# yeah I know, you can't pass the 'pass' in
# the constructor.. I'm lame and maybe I'll fix
# it later
institution = ofxclient.Institution(
id = ofxhome_id,
username = your_username
)
institution.password = your_password
# You HAVE to call save() but only just once. Calling save
# repeatedly won't hurt anything.
# Note that after calling this, you would never need to specify the
# institution.password again as it will be loaded from the keychain
#
# save() triggers saving of cache information (see ~/.ofxclient) as well
# as a config file (see ~/.ofxclient.conf)
institution.save()
accounts = institution.accounts()
# returns an ofxparse.Statement object
# see the ofx.account.statement portion of their docs:
# https://github.com/jseutter/ofxparse/blob/master/README
statement = accounts[0].statement(days=5)
# get the balance
print "balance: %s" % statement.balance
# and get the transactions too if you want
pprint(statement.transactions)
|
Add an example on the library usage# Example on how to use the ofxclient library without the
# web interface. It's admittedly clunky but doable -- it's
# free so get over it >=)
import ofxclient
from pprint import pprint
# http://www.ofxhome.com/index.php/institution/view/424
# note this is NOT the FI Id. It's the ofxhome ID.
ofxhome_id = '424'
your_username = 'genewilder'
your_password = 'ihatecandy'
# yeah I know, you can't pass the 'pass' in
# the constructor.. I'm lame and maybe I'll fix
# it later
institution = ofxclient.Institution(
id = ofxhome_id,
username = your_username
)
institution.password = your_password
# You HAVE to call save() but only just once. Calling save
# repeatedly won't hurt anything.
# Note that after calling this, you would never need to specify the
# institution.password again as it will be loaded from the keychain
#
# save() triggers saving of cache information (see ~/.ofxclient) as well
# as a config file (see ~/.ofxclient.conf)
institution.save()
accounts = institution.accounts()
# returns an ofxparse.Statement object
# see the ofx.account.statement portion of their docs:
# https://github.com/jseutter/ofxparse/blob/master/README
statement = accounts[0].statement(days=5)
# get the balance
print "balance: %s" % statement.balance
# and get the transactions too if you want
pprint(statement.transactions)
|
<commit_before><commit_msg>Add an example on the library usage<commit_after># Example on how to use the ofxclient library without the
# web interface. It's admittedly clunky but doable -- it's
# free so get over it >=)
import ofxclient
from pprint import pprint
# http://www.ofxhome.com/index.php/institution/view/424
# note this is NOT the FI Id. It's the ofxhome ID.
ofxhome_id = '424'
your_username = 'genewilder'
your_password = 'ihatecandy'
# yeah I know, you can't pass the 'pass' in
# the constructor.. I'm lame and maybe I'll fix
# it later
institution = ofxclient.Institution(
id = ofxhome_id,
username = your_username
)
institution.password = your_password
# You HAVE to call save() but only just once. Calling save
# repeatedly won't hurt anything.
# Note that after calling this, you would never need to specify the
# institution.password again as it will be loaded from the keychain
#
# save() triggers saving of cache information (see ~/.ofxclient) as well
# as a config file (see ~/.ofxclient.conf)
institution.save()
accounts = institution.accounts()
# returns an ofxparse.Statement object
# see the ofx.account.statement portion of their docs:
# https://github.com/jseutter/ofxparse/blob/master/README
statement = accounts[0].statement(days=5)
# get the balance
print "balance: %s" % statement.balance
# and get the transactions too if you want
pprint(statement.transactions)
|
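The example above targets Python 2 (print statement). A minimal sketch of the same flow for Python 3, assuming only the ofxclient calls shown in the example (Institution, save(), accounts(), statement(days=...)); the id and credentials remain the example's placeholders:
import ofxclient
from pprint import pprint

# Same flow as the example above, Python 3 style; id/credentials are
# the example's placeholder values, not real ones.
institution = ofxclient.Institution(id='424', username='genewilder')
institution.password = 'ihatecandy'
institution.save()  # one-time persistence of config and cache

statement = institution.accounts()[0].statement(days=5)
print("balance: %s" % statement.balance)  # print() is a function in Py3
pprint(statement.transactions)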
|
f4fa2d526f6f9c8b972c20ac073ed8f0682871ea
|
indra/tools/disambiguate.py
|
indra/tools/disambiguate.py
|
import logging
from collections import defaultdict
from indra.literature.elsevier_client import logger as elsevier_logger
from indra.literature import pubmed_client, pmc_client, elsevier_client
logger = logging.getLogger('disambiguate')
# the elsevier_client will log messages that it is safe to ignore
elsevier_logger.setLevel(logging.WARNING)
def get_fulltexts_from_entrez(hgnc_name):
pmids = pubmed_client.get_ids_for_gene(hgnc_name)
articles = (pubmed_client.get_article_xml(pmid) for pmid in pmids)
fulltexts = [_universal_extract_text(article) for article in articles]
return fulltexts
def _universal_extract_text(xml):
# first try to parse the xml as if it came from elsevier. if we do not
# have valid elsevier xml this will throw an exception.
# the text extraction function in the pmc client may not throw an
# exception when parsing elsevier xml, silently processing the xml
# incorrectly
try:
fulltext = elsevier_client.extract_text(xml)
except Exception:
try:
fulltext = pmc_client.extract_text(xml)
except Exception:
# fall back by returning input string unmodified
fulltext = xml
return fulltext
def _get_text_from_pmids(pmids):
pmc_content = set(pubmed_client.filter_pmids(pmids))
pmc_ids = (pmc_client.id_lookup(pmid, idtype='pmid')['pmcid']
for pmid in pmc_content)
pmc_xmls = (pmc_client.get_xml(pmc_id) for pmc_id in pmc_ids)
pmc_texts = set(_universal_extract_text(xml) for xml in pmc_xmls)
other_content = set(pmids) - pmc_content
ids = (pmc_client.id_lookup(pmid, idtype='pmid') for pmid in pmids)
elsevier_content = (elsevier_client.download_article_from_id(pmid)
for pmid in pmids)
|
Add unfinished scripts that assist in deft disambiguation
|
Add unfinished scripts that assist in deft disambiguation
Git history was completely garbled through carelessness. The original deft
branch was deleted and a new branch was created
|
Python
|
bsd-2-clause
|
bgyori/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/indra,sorgerlab/indra,sorgerlab/belpy,sorgerlab/belpy,pvtodorov/indra,bgyori/indra,sorgerlab/belpy,pvtodorov/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,johnbachman/belpy,bgyori/indra
|
Add unfinished scripts that assist in deft disambiguation
Git history was completely garbled through carelessness. The original deft
branch was deleted and a new branch was created
|
import logging
from collections import defaultdict
from indra.literature.elsevier_client import logger as elsevier_logger
from indra.literature import pubmed_client, pmc_client, elsevier_client
logger = logging.getLogger('disambiguate')
# the elsevier_client will log messages that it is safe to ignore
elsevier_logger.setLevel(logging.WARNING)
def get_fulltexts_from_entrez(hgnc_name):
pmids = pubmed_client.get_ids_for_gene(hgnc_name)
articles = (pubmed_client.get_article_xml(pmid) for pmid in pmids)
fulltexts = [_universal_extract_text(article) for article in articles]
return fulltexts
def _universal_extract_text(xml):
# first try to parse the xml as if it came from elsevier. if we do not
# have valid elsevier xml this will throw an exception.
# the text extraction function in the pmc client may not throw an
# exception when parsing elsevier xml, silently processing the xml
# incorrectly
try:
fulltext = elsevier_client.extract_text(xml)
except Exception:
try:
fulltext = pmc_client.extract_text(xml)
except Exception:
# fall back by returning input string unmodified
fulltext = xml
return fulltext
def _get_text_from_pmids(pmids):
pmc_content = set(pubmed_client.filter_pmids(pmids))
pmc_ids = (pmc_client.id_lookup(pmid, idtype='pmid')['pmcid']
for pmid in pmc_content)
pmc_xmls = (pmc_client.get_xml(pmc_id) for pmc_id in pmc_ids)
pmc_texts = set(_universal_extract_text(xml) for xml in pmc_xmls)
other_content = set(pmids) - pmc_content
ids = (pmc_client.id_lookup(pmid, idtype='pmid') for pmid in pmids)
elsevier_content = (elsevier_client.download_article_from_id(pmid)
for pmid in pmids)
|
<commit_before><commit_msg>Add unfinished scripts that assist in deft disambiguation
Git history was completely garbled through carelessness. The original deft
branch was deleted and a new branch was created<commit_after>
|
import logging
from collections import defaultdict
from indra.literature.elsevier_client import logger as elsevier_logger
from indra.literature import pubmed_client, pmc_client, elsevier_client
logger = logging.getLogger('disambiguate')
# the elsevier_client will log messages that it is safe to ignore
elsevier_logger.setLevel(logging.WARNING)
def get_fulltexts_from_entrez(hgnc_name):
pmids = pubmed_client.get_ids_for_gene(hgnc_name)
articles = (pubmed_client.get_article_xml(pmid) for pmid in pmids)
fulltexts = [_universal_extract_text(article) for article in articles]
return fulltexts
def _universal_extract_text(xml):
# first try to parse the xml as if it came from elsevier. if we do not
# have valid elsevier xml this will throw an exception.
# the text extraction function in the pmc client may not throw an
# exception when parsing elsevier xml, silently processing the xml
# incorrectly
try:
fulltext = elsevier_client.extract_text(xml)
except Exception:
try:
fulltext = pmc_client.extract_text(xml)
except Exception:
# fall back by returning input string unmodified
fulltext = xml
return fulltext
def _get_text_from_pmids(pmids):
pmc_content = set(pubmed_client.filter_pmids(pmids))
pmc_ids = (pmc_client.id_lookup(pmid, idtype='pmid')['pmcid']
for pmid in pmc_content)
pmc_xmls = (pmc_client.get_xml(pmc_id) for pmc_id in pmc_ids)
pmc_texts = set(_universal_extract_text(xml) for xml in pmc_xmls)
other_content = set(pmids) - pmc_content
ids = (pmc_client.id_lookup(pmid, idtype='pmid') for pmid in pmids)
elsevier_content = (elsevier_client.download_article_from_id(pmid)
for pmid in pmids)
|
Add unfinished scripts that assist in deft disambiguation
Git history was completely garbled through carelessness. The original deft
branch was deleted and a new branch was createdimport logging
from collections import defaultdict
from indra.literature.elsevier_client import logger as elsevier_logger
from indra.literature import pubmed_client, pmc_client, elsevier_client
logger = logging.getLogger('disambiguate')
# the elsevier_client will log messages that it is safe to ignore
elsevier_logger.setLevel(logging.WARNING)
def get_fulltexts_from_entrez(hgnc_name):
pmids = pubmed_client.get_ids_for_gene(hgnc_name)
articles = (pubmed_client.get_article_xml(pmid) for pmid in pmids)
fulltexts = [_universal_extract_text(article) for article in articles]
return fulltexts
def _universal_extract_text(xml):
# first try to parse the xml as if it came from elsevier. if we do not
# have valid elsevier xml this will throw an exception.
# the text extraction function in the pmc client may not throw an
# exception when parsing elsevier xml, silently processing the xml
# incorrectly
try:
fulltext = elsevier_client.extract_text(xml)
except Exception:
try:
fulltext = pmc_client.extract_text(xml)
except Exception:
# fall back by returning input string unmodified
fulltext = xml
return fulltext
def _get_text_from_pmids(pmids):
pmc_content = set(pubmed_client.filter_pmids(pmids))
pmc_ids = (pmc_client.id_lookup(pmid, idtype='pmid')['pmcid']
for pmid in pmc_content)
pmc_xmls = (pmc_client.get_xml(pmc_id) for pmc_id in pmc_ids)
pmc_texts = set(_universal_extract_text(xml) for xml in pmc_xmls)
other_content = set(pmids) - pmc_content
ids = (pmc_client.id_lookup(pmid, idtype='pmid') for pmid in pmids)
elsevier_content = (elsevier_client.download_article_from_id(pmid)
for pmid in pmids)
|
<commit_before><commit_msg>Add unfinished scripts that assist in deft disambiguation
Git history was completely garbled through carelessness. The original deft
branch was deleted and a new branch was created<commit_after>import logging
from collections import defaultdict
from indra.literature.elsevier_client import logger as elsevier_logger
from indra.literature import pubmed_client, pmc_client, elsevier_client
logger = logging.getLogger('disambiguate')
# the elsevier_client will log messages that it is safe to ignore
elsevier_logger.setLevel(logging.WARNING)
def get_fulltexts_from_entrez(hgnc_name):
pmids = pubmed_client.get_ids_for_gene(hgnc_name)
articles = (pubmed_client.get_article_xml(pmid) for pmid in pmids)
fulltexts = [_universal_extract_text(article) for article in articles]
return fulltexts
def _universal_extract_text(xml):
# first try to parse the xml as if it came from elsevier. if we do not
# have valid elsevier xml this will throw an exception.
# the text extraction function in the pmc client may not throw an
# exception when parsing elsevier xml, silently processing the xml
# incorrectly
try:
fulltext = elsevier_client.extract_text(xml)
except Exception:
try:
fulltext = pmc_client.extract_text(xml)
except Exception:
# fall back by returning input string unmodified
fulltext = xml
return fulltext
def _get_text_from_pmids(pmids):
pmc_content = set(pubmed_client.filter_pmids(pmids))
pmc_ids = (pmc_client.id_lookup(pmid, idtype='pmid')['pmcid']
for pmid in pmc_content)
pmc_xmls = (pmc_client.get_xml(pmc_id) for pmc_id in pmc_ids)
pmc_texts = set(_universal_extract_text(xml) for xml in pmc_xmls)
other_content = set(pmids) - pmc_content
ids = (pmc_client.id_lookup(pmid, idtype='pmid') for pmid in pmids)
elsevier_content = (elsevier_client.download_article_from_id(pmid)
for pmid in pmids)
|
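_universal_extract_text above hard-codes a two-step fallback (elsevier, then pmc, then the raw input). A self-contained sketch of the same pattern generalized to any ordered list of extractors; this is an illustration, not part of the indra API:
def extract_with_fallback(text, extractors):
    """Try each extractor in order; return the raw input if all fail.

    Mirrors the nested try/except chain in _universal_extract_text above.
    """
    for extract in extractors:
        try:
            return extract(text)
        except Exception:
            continue
    return text

# Toy usage with a stand-in extractor callable:
result = extract_with_fallback('<xml/>', [lambda s: s.strip('</>')])
print(result)  # -> 'xml'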
|
a4ed98ddec2afd5121f90db3ed6927c078958f81
|
cdf/jinja_utils.py
|
cdf/jinja_utils.py
|
from jinja2 import (
select_autoescape,
Environment,
FileSystemLoader,
)
template_env = Environment(
loader=FileSystemLoader("templates"),
autoescape=select_autoescape(['html', 'xml'])
)
|
Create a jinja2 env for rendering templates
|
Create a jinja2 env for rendering templates
|
Python
|
mit
|
ana-balica/classy-django-forms,ana-balica/classy-django-forms,ana-balica/classy-django-forms
|
Create a jinja2 env for rendering templates
|
from jinja2 import (
select_autoescape,
Environment,
FileSystemLoader,
)
template_env = Environment(
loader=FileSystemLoader("templates"),
autoescape=select_autoescape(['html', 'xml'])
)
|
<commit_before><commit_msg>Create a jinja2 env for rendering templates<commit_after>
|
from jinja2 import (
select_autoescape,
Environment,
FileSystemLoader,
)
template_env = Environment(
loader=FileSystemLoader("templates"),
autoescape=select_autoescape(['html', 'xml'])
)
|
Create a jinja2 env for rendering templatesfrom jinja2 import (
select_autoescape,
Environment,
FileSystemLoader,
)
template_env = Environment(
loader=FileSystemLoader("templates"),
autoescape=select_autoescape(['html', 'xml'])
)
|
<commit_before><commit_msg>Create a jinja2 env for rendering templates<commit_after>from jinja2 import (
select_autoescape,
Environment,
FileSystemLoader,
)
template_env = Environment(
loader=FileSystemLoader("templates"),
autoescape=select_autoescape(['html', 'xml'])
)
|
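A brief usage note for the environment above: templates resolve relative to the "templates" directory and HTML/XML output is autoescaped. A sketch, with a hypothetical template name:
# Hypothetical template; assumes templates/index.html exists on disk.
template = template_env.get_template('index.html')
html = template.render(title='Classy Django Forms')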
|
110445d89f230cc71bfd970845e0e63c13eeaff0
|
tests/test_datastore.py
|
tests/test_datastore.py
|
import pytest
from web_test_base import *
class TestIATIDatastore(WebTestBase):
urls_to_get = [
"http://datastore.iatistandard.org/"
]
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = self._get_links_from_page(loaded_request)
assert "http://iatiregistry.org/" in result
|
Add tests for the IATI Datastore
This adds a 200 response and link checks for the IATI Datastore
|
Add tests for the IATI Datastore
This adds a 200 response and link checks for the IATI Datastore
|
Python
|
mit
|
IATI/IATI-Website-Tests
|
Add tests for the IATI Datastore
This adds a 200 response and link checks for the IATI Datastore
|
import pytest
from web_test_base import *
class TestIATIDatastore(WebTestBase):
urls_to_get = [
"http://datastore.iatistandard.org/"
]
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = self._get_links_from_page(loaded_request)
assert "http://iatiregistry.org/" in result
|
<commit_before><commit_msg>Add tests for the IATI Datastore
This adds a 200 response and link checks for the IATI Datastore<commit_after>
|
import pytest
from web_test_base import *
class TestIATIDatastore(WebTestBase):
urls_to_get = [
"http://datastore.iatistandard.org/"
]
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = self._get_links_from_page(loaded_request)
assert "http://iatiregistry.org/" in result
|
Add tests for the IATI Datastore
This adds a 200 response and link checks for the IATI Datastoreimport pytest
from web_test_base import *
class TestIATIDatastore(WebTestBase):
urls_to_get = [
"http://datastore.iatistandard.org/"
]
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = self._get_links_from_page(loaded_request)
assert "http://iatiregistry.org/" in result
|
<commit_before><commit_msg>Add tests for the IATI Datastore
This adds a 200 response and link checks for the IATI Datastore<commit_after>import pytest
from web_test_base import *
class TestIATIDatastore(WebTestBase):
urls_to_get = [
"http://datastore.iatistandard.org/"
]
def test_contains_links(self, loaded_request):
"""
Test that each page contains links to the defined URLs.
"""
result = self._get_links_from_page(loaded_request)
assert "http://iatiregistry.org/" in result
|
|
3e3c2357305cee7bff1ee4f6697ad78f96c7ea04
|
tests/test_sqlite_db.py
|
tests/test_sqlite_db.py
|
'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
Add first unit tests for sqlite db
|
Add first unit tests for sqlite db
|
Python
|
mpl-2.0
|
awest1339/multiscanner,jmlong1027/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,awest1339/multiscanner,MITRECND/multiscanner,mitre/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,MITRECND/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,awest1339/multiscanner
|
Add first unit tests for sqlite db
|
'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
<commit_before><commit_msg>Add first unit tests for sqlite db<commit_after>
|
'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
Add first unit tests for sqlite db'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
<commit_before><commit_msg>Add first unit tests for sqlite db<commit_after>'''
Module for testing the SQLite DB.
Fairly similar to the test_api tests...
'''
import os
import sys
import json
import unittest
CWD = os.path.dirname(os.path.abspath(__file__))
MS_WD = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Allow import of sqlite_driver
if os.path.join(MS_WD, 'storage') not in sys.path:
sys.path.insert(0, os.path.join(MS_WD, 'storage'))
# Use multiscanner in ../
sys.path.insert(0, os.path.dirname(CWD))
from sqlite_driver import Database, Record
TEST_DB_PATH = os.path.join(CWD, 'testing.db')
TEST_UPLOAD_FOLDER = os.path.join(CWD, 'tmp')
TEST_REPORT = {'MD5': '96b47da202ddba8d7a6b91fecbf89a41', 'SHA256': '26d11f0ea5cc77a59b6e47deee859440f26d2d14440beb712dbac8550d35ef1f', 'libmagic': 'a /bin/python script text executable', 'filename': '/opt/other_file'}
class TestRecordSerialization(unittest.TestCase):
def setUp(self):
self.sql_db = Database(TEST_DB_PATH)
self.sql_db.init_sqlite_db()
self.record = Record(
task_id = 1,
task_status = 'Pending',
report_id = None
)
def test_record_dict_serialization(self):
self.assertDictEqual(
{'task_id': 1, 'task_status': 'Pending', 'report_id': None},
self.record.to_dict()
)
def test_record_json_serialization(self):
self.assertEqual(
json.dumps({'task_id': 1, 'task_status': 'Pending', 'report_id': None}),
self.record.to_json()
)
def tearDown(self):
os.remove(TEST_DB_PATH)
|
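One fragility in the tests above: if setUp fails before the database file is created, the unconditional os.remove in tearDown raises a second, misleading error. A self-contained sketch of a guarded variant (the path constant is renamed here to keep the sketch standalone):
import os
import unittest

DB_PATH = 'testing.db'  # stand-in for TEST_DB_PATH above

class GuardedTearDownExample(unittest.TestCase):
    def test_db_file_created(self):
        open(DB_PATH, 'a').close()  # simulate the driver creating the DB
        self.assertTrue(os.path.exists(DB_PATH))

    def tearDown(self):
        if os.path.exists(DB_PATH):  # only clean up what actually exists
            os.remove(DB_PATH)

if __name__ == '__main__':
    unittest.main()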
|
153618fbd1bb23d57f14e52f09dc19720df83fd4
|
tests/test_transform.py
|
tests/test_transform.py
|
import unittest
class RoadSortKeyTest(unittest.TestCase):
def _call_fut(self, props, zoom=15, shape=None, fid=None):
from TileStache.Goodies.VecTiles.transform import road_sort_key
_, newprops, fid = road_sort_key(shape, props, fid, zoom)
sort_key = newprops['sort_key']
return sort_key
def _call_fut_with_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
return self._call_fut({key: val}, zoom, shape, fid)
def _call_fut_with_float_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
val = float(val)
return self._call_fut({key: val}, zoom, shape, fid)
def test_layer_low(self):
sort_key = self._call_fut_with_float_prop('layer=-5')
self.assertEqual(300, sort_key)
def test_tunnel(self):
sort_key = self._call_fut_with_prop('tunnel=yes')
self.assertEqual(315, sort_key)
def test_empty(self):
sort_key = self._call_fut({})
self.assertEqual(325, sort_key)
def test_railway_service_highway(self):
props = dict(
railway='rail',
service='unknown',
highway='service'
)
sort_key = self._call_fut(props)
self.assertEqual(334, sort_key)
def test_link(self):
sort_key = self._call_fut_with_prop('highway=primary_link')
self.assertEqual(338, sort_key)
def test_railway(self):
sort_key = self._call_fut_with_prop('railway=rail')
self.assertEqual(343, sort_key)
def test_motorway(self):
sort_key = self._call_fut_with_prop('highway=motorway')
self.assertEqual(344, sort_key)
def test_bridge(self):
sort_key = self._call_fut_with_prop('bridge=yes')
self.assertEqual(365, sort_key)
def test_aerialway(self):
sort_key = self._call_fut_with_prop('aerialway=gondola')
self.assertEqual(377, sort_key)
def test_layer_high(self):
sort_key = self._call_fut_with_float_prop('layer=5')
self.assertEqual(385, sort_key)
|
Add road sort key tests
|
Add road sort key tests
|
Python
|
mit
|
mapzen/tilequeue,tilezen/tilequeue
|
Add road sort key tests
|
import unittest
class RoadSortKeyTest(unittest.TestCase):
def _call_fut(self, props, zoom=15, shape=None, fid=None):
from TileStache.Goodies.VecTiles.transform import road_sort_key
_, newprops, fid = road_sort_key(shape, props, fid, zoom)
sort_key = newprops['sort_key']
return sort_key
def _call_fut_with_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
return self._call_fut({key: val}, zoom, shape, fid)
def _call_fut_with_float_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
val = float(val)
return self._call_fut({key: val}, zoom, shape, fid)
def test_layer_low(self):
sort_key = self._call_fut_with_float_prop('layer=-5')
self.assertEqual(300, sort_key)
def test_tunnel(self):
sort_key = self._call_fut_with_prop('tunnel=yes')
self.assertEqual(315, sort_key)
def test_empty(self):
sort_key = self._call_fut({})
self.assertEqual(325, sort_key)
def test_railway_service_highway(self):
props = dict(
railway='rail',
service='unknown',
highway='service'
)
sort_key = self._call_fut(props)
self.assertEqual(334, sort_key)
def test_link(self):
sort_key = self._call_fut_with_prop('highway=primary_link')
self.assertEqual(338, sort_key)
def test_railway(self):
sort_key = self._call_fut_with_prop('railway=rail')
self.assertEqual(343, sort_key)
def test_motorway(self):
sort_key = self._call_fut_with_prop('highway=motorway')
self.assertEqual(344, sort_key)
def test_bridge(self):
sort_key = self._call_fut_with_prop('bridge=yes')
self.assertEqual(365, sort_key)
def test_aerialway(self):
sort_key = self._call_fut_with_prop('aerialway=gondola')
self.assertEqual(377, sort_key)
def test_layer_high(self):
sort_key = self._call_fut_with_float_prop('layer=5')
self.assertEqual(385, sort_key)
|
<commit_before><commit_msg>Add road sort key tests<commit_after>
|
import unittest
class RoadSortKeyTest(unittest.TestCase):
def _call_fut(self, props, zoom=15, shape=None, fid=None):
from TileStache.Goodies.VecTiles.transform import road_sort_key
_, newprops, fid = road_sort_key(shape, props, fid, zoom)
sort_key = newprops['sort_key']
return sort_key
def _call_fut_with_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
return self._call_fut({key: val}, zoom, shape, fid)
def _call_fut_with_float_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
val = float(val)
return self._call_fut({key: val}, zoom, shape, fid)
def test_layer_low(self):
sort_key = self._call_fut_with_float_prop('layer=-5')
self.assertEqual(300, sort_key)
def test_tunnel(self):
sort_key = self._call_fut_with_prop('tunnel=yes')
self.assertEqual(315, sort_key)
def test_empty(self):
sort_key = self._call_fut({})
self.assertEqual(325, sort_key)
def test_railway_service_highway(self):
props = dict(
railway='rail',
service='unknown',
highway='service'
)
sort_key = self._call_fut(props)
self.assertEqual(334, sort_key)
def test_link(self):
sort_key = self._call_fut_with_prop('highway=primary_link')
self.assertEqual(338, sort_key)
def test_railway(self):
sort_key = self._call_fut_with_prop('railway=rail')
self.assertEqual(343, sort_key)
def test_motorway(self):
sort_key = self._call_fut_with_prop('highway=motorway')
self.assertEqual(344, sort_key)
def test_bridge(self):
sort_key = self._call_fut_with_prop('bridge=yes')
self.assertEqual(365, sort_key)
def test_aerialway(self):
sort_key = self._call_fut_with_prop('aerialway=gondola')
self.assertEqual(377, sort_key)
def test_layer_high(self):
sort_key = self._call_fut_with_float_prop('layer=5')
self.assertEqual(385, sort_key)
|
Add road sort key testsimport unittest
class RoadSortKeyTest(unittest.TestCase):
def _call_fut(self, props, zoom=15, shape=None, fid=None):
from TileStache.Goodies.VecTiles.transform import road_sort_key
_, newprops, fid = road_sort_key(shape, props, fid, zoom)
sort_key = newprops['sort_key']
return sort_key
def _call_fut_with_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
return self._call_fut({key: val}, zoom, shape, fid)
def _call_fut_with_float_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
val = float(val)
return self._call_fut({key: val}, zoom, shape, fid)
def test_layer_low(self):
sort_key = self._call_fut_with_float_prop('layer=-5')
self.assertEqual(300, sort_key)
def test_tunnel(self):
sort_key = self._call_fut_with_prop('tunnel=yes')
self.assertEqual(315, sort_key)
def test_empty(self):
sort_key = self._call_fut({})
self.assertEqual(325, sort_key)
def test_railway_service_highway(self):
props = dict(
railway='rail',
service='unknown',
highway='service'
)
sort_key = self._call_fut(props)
self.assertEqual(334, sort_key)
def test_link(self):
sort_key = self._call_fut_with_prop('highway=primary_link')
self.assertEqual(338, sort_key)
def test_railway(self):
sort_key = self._call_fut_with_prop('railway=rail')
self.assertEqual(343, sort_key)
def test_motorway(self):
sort_key = self._call_fut_with_prop('highway=motorway')
self.assertEqual(344, sort_key)
def test_bridge(self):
sort_key = self._call_fut_with_prop('bridge=yes')
self.assertEqual(365, sort_key)
def test_aerialway(self):
sort_key = self._call_fut_with_prop('aerialway=gondola')
self.assertEqual(377, sort_key)
def test_layer_high(self):
sort_key = self._call_fut_with_float_prop('layer=5')
self.assertEqual(385, sort_key)
|
<commit_before><commit_msg>Add road sort key tests<commit_after>import unittest
class RoadSortKeyTest(unittest.TestCase):
def _call_fut(self, props, zoom=15, shape=None, fid=None):
from TileStache.Goodies.VecTiles.transform import road_sort_key
_, newprops, fid = road_sort_key(shape, props, fid, zoom)
sort_key = newprops['sort_key']
return sort_key
def _call_fut_with_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
return self._call_fut({key: val}, zoom, shape, fid)
def _call_fut_with_float_prop(self, keyval, zoom=15, shape=None,
fid=None):
key, val = keyval.split('=')
val = float(val)
return self._call_fut({key: val}, zoom, shape, fid)
def test_layer_low(self):
sort_key = self._call_fut_with_float_prop('layer=-5')
self.assertEqual(300, sort_key)
def test_tunnel(self):
sort_key = self._call_fut_with_prop('tunnel=yes')
self.assertEqual(315, sort_key)
def test_empty(self):
sort_key = self._call_fut({})
self.assertEqual(325, sort_key)
def test_railway_service_highway(self):
props = dict(
railway='rail',
service='unknown',
highway='service'
)
sort_key = self._call_fut(props)
self.assertEqual(334, sort_key)
def test_link(self):
sort_key = self._call_fut_with_prop('highway=primary_link')
self.assertEqual(338, sort_key)
def test_railway(self):
sort_key = self._call_fut_with_prop('railway=rail')
self.assertEqual(343, sort_key)
def test_motorway(self):
sort_key = self._call_fut_with_prop('highway=motorway')
self.assertEqual(344, sort_key)
def test_bridge(self):
sort_key = self._call_fut_with_prop('bridge=yes')
self.assertEqual(365, sort_key)
def test_aerialway(self):
sort_key = self._call_fut_with_prop('aerialway=gondola')
self.assertEqual(377, sort_key)
def test_layer_high(self):
sort_key = self._call_fut_with_float_prop('layer=5')
self.assertEqual(385, sort_key)
|
|
42bb37178827669e89e9222c24f79de38c5b75b8
|
gen-changelog.py
|
gen-changelog.py
|
# Writes a changelog in trac WikiFormatting based on a git log
from __future__ import unicode_literals, division, absolute_import
import codecs
from itertools import ifilter
import os
import re
import subprocess
import sys
import dateutil.parser
out_path = 'ChangeLog'
if len(sys.argv) > 1:
dir_name = os.path.dirname(sys.argv[1])
if dir_name and not os.path.isdir(dir_name):
print 'Output dir doesn\'t exist: %s' % sys.argv[1]
sys.exit(1)
out_path = sys.argv[1]
# 1.0.3280 was last revision on svn
git_log_output = subprocess.check_output(['git', 'log', '--pretty=%n---%n.%d%n%ci%n%h%n%s%n%-b%n---%n', '--topo-order', 'refs/tags/1.0.3280..HEAD'])
git_log_iter = ifilter(None, git_log_output.decode('utf-8').splitlines())
with codecs.open(out_path, 'w', encoding='utf-8') as out_file:
for line in git_log_iter:
assert line == '---'
tag = re.search('tag: ([\d.]+)', next(git_log_iter))
date = dateutil.parser.parse(next(git_log_iter))
commit_hash = next(git_log_iter)
body = list(iter(git_log_iter.next, '---'))
if tag:
out_file.write('\n=== %s (%s) ===\n\n' % (tag.group(1), date.strftime('%Y.%m.%d')))
out_file.write(' * (%s) %s\n' % (commit_hash, '[[BR]]\n '.join(body)))
|
Add a helper script to generate a changelog from git log
|
Add a helper script to generate a changelog from git log
|
Python
|
mit
|
tsnoam/Flexget,OmgOhnoes/Flexget,v17al/Flexget,ZefQ/Flexget,qvazzler/Flexget,ianstalk/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,tarzasai/Flexget,jacobmetrick/Flexget,ibrahimkarahan/Flexget,JorisDeRieck/Flexget,jawilson/Flexget,Danfocus/Flexget,jawilson/Flexget,cvium/Flexget,Flexget/Flexget,poulpito/Flexget,vfrc2/Flexget,v17al/Flexget,ZefQ/Flexget,vfrc2/Flexget,Danfocus/Flexget,drwyrm/Flexget,ibrahimkarahan/Flexget,xfouloux/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,drwyrm/Flexget,X-dark/Flexget,thalamus/Flexget,gazpachoking/Flexget,X-dark/Flexget,v17al/Flexget,Flexget/Flexget,asm0dey/Flexget,Danfocus/Flexget,xfouloux/Flexget,crawln45/Flexget,xfouloux/Flexget,lildadou/Flexget,thalamus/Flexget,ratoaq2/Flexget,jawilson/Flexget,antivirtel/Flexget,oxc/Flexget,antivirtel/Flexget,asm0dey/Flexget,lildadou/Flexget,sean797/Flexget,qvazzler/Flexget,Pretagonist/Flexget,tobinjt/Flexget,asm0dey/Flexget,tvcsantos/Flexget,Danfocus/Flexget,gazpachoking/Flexget,ZefQ/Flexget,spencerjanssen/Flexget,tobinjt/Flexget,offbyone/Flexget,grrr2/Flexget,tobinjt/Flexget,crawln45/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,patsissons/Flexget,dsemi/Flexget,spencerjanssen/Flexget,LynxyssCZ/Flexget,sean797/Flexget,ibrahimkarahan/Flexget,dsemi/Flexget,qk4l/Flexget,ianstalk/Flexget,OmgOhnoes/Flexget,ianstalk/Flexget,tarzasai/Flexget,offbyone/Flexget,tsnoam/Flexget,LynxyssCZ/Flexget,Flexget/Flexget,crawln45/Flexget,grrr2/Flexget,crawln45/Flexget,jacobmetrick/Flexget,grrr2/Flexget,thalamus/Flexget,Pretagonist/Flexget,qvazzler/Flexget,patsissons/Flexget,drwyrm/Flexget,tobinjt/Flexget,ratoaq2/Flexget,cvium/Flexget,antivirtel/Flexget,patsissons/Flexget,poulpito/Flexget,jacobmetrick/Flexget,spencerjanssen/Flexget,voriux/Flexget,malkavi/Flexget,oxc/Flexget,malkavi/Flexget,voriux/Flexget,tsnoam/Flexget,poulpito/Flexget,X-dark/Flexget,tvcsantos/Flexget,camon/Flexget,qk4l/Flexget,sean797/Flexget,dsemi/Flexget,JorisDeRieck/Flexget,offbyone/Flexget,cvium/Flexget,tarzasai/Flexget,vfrc2/Flexget,jawilson/Flexget,camon/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,lildadou/Flexget,Flexget/Flexget,ratoaq2/Flexget,Pretagonist/Flexget,oxc/Flexget
|
Add a helper script to generate a changelog from git log
|
# Writes a changelog in trac WikiFormatting based on a git log
from __future__ import unicode_literals, division, absolute_import
import codecs
from itertools import ifilter
import os
import re
import subprocess
import sys
import dateutil.parser
out_path = 'ChangeLog'
if len(sys.argv) > 1:
dir_name = os.path.dirname(sys.argv[1])
if dir_name and not os.path.isdir(dir_name):
print 'Output dir doesn\'t exist: %s' % sys.argv[1]
sys.exit(1)
out_path = sys.argv[1]
# 1.0.3280 was last revision on svn
git_log_output = subprocess.check_output(['git', 'log', '--pretty=%n---%n.%d%n%ci%n%h%n%s%n%-b%n---%n', '--topo-order', 'refs/tags/1.0.3280..HEAD'])
git_log_iter = ifilter(None, git_log_output.decode('utf-8').splitlines())
with codecs.open(out_path, 'w', encoding='utf-8') as out_file:
for line in git_log_iter:
assert line == '---'
tag = re.search('tag: ([\d.]+)', next(git_log_iter))
date = dateutil.parser.parse(next(git_log_iter))
commit_hash = next(git_log_iter)
body = list(iter(git_log_iter.next, '---'))
if tag:
out_file.write('\n=== %s (%s) ===\n\n' % (tag.group(1), date.strftime('%Y.%m.%d')))
out_file.write(' * (%s) %s\n' % (commit_hash, '[[BR]]\n '.join(body)))
|
<commit_before><commit_msg>Add a helper script to generate a changelog from git log<commit_after>
|
# Writes a changelog in trac WikiFormatting based on a git log
from __future__ import unicode_literals, division, absolute_import
import codecs
from itertools import ifilter
import os
import re
import subprocess
import sys
import dateutil.parser
out_path = 'ChangeLog'
if len(sys.argv) > 1:
dir_name = os.path.dirname(sys.argv[1])
if dir_name and not os.path.isdir(dir_name):
print 'Output dir doesn\'t exist: %s' % sys.argv[1]
sys.exit(1)
out_path = sys.argv[1]
# 1.0.3280 was last revision on svn
git_log_output = subprocess.check_output(['git', 'log', '--pretty=%n---%n.%d%n%ci%n%h%n%s%n%-b%n---%n', '--topo-order', 'refs/tags/1.0.3280..HEAD'])
git_log_iter = ifilter(None, git_log_output.decode('utf-8').splitlines())
with codecs.open(out_path, 'w', encoding='utf-8') as out_file:
for line in git_log_iter:
assert line == '---'
tag = re.search('tag: ([\d.]+)', next(git_log_iter))
date = dateutil.parser.parse(next(git_log_iter))
commit_hash = next(git_log_iter)
body = list(iter(git_log_iter.next, '---'))
if tag:
out_file.write('\n=== %s (%s) ===\n\n' % (tag.group(1), date.strftime('%Y.%m.%d')))
out_file.write(' * (%s) %s\n' % (commit_hash, '[[BR]]\n '.join(body)))
|
Add a helper script to generate a changelog from git log# Writes a changelog in trac WikiFormatting based on a git log
from __future__ import unicode_literals, division, absolute_import
import codecs
from itertools import ifilter
import os
import re
import subprocess
import sys
import dateutil.parser
out_path = 'ChangeLog'
if len(sys.argv) > 1:
dir_name = os.path.dirname(sys.argv[1])
if dir_name and not os.path.isdir(dir_name):
print 'Output dir doesn\'t exist: %s' % sys.argv[1]
sys.exit(1)
out_path = sys.argv[1]
# 1.0.3280 was last revision on svn
git_log_output = subprocess.check_output(['git', 'log', '--pretty=%n---%n.%d%n%ci%n%h%n%s%n%-b%n---%n', '--topo-order', 'refs/tags/1.0.3280..HEAD'])
git_log_iter = ifilter(None, git_log_output.decode('utf-8').splitlines())
with codecs.open(out_path, 'w', encoding='utf-8') as out_file:
for line in git_log_iter:
assert line == '---'
tag = re.search('tag: ([\d.]+)', next(git_log_iter))
date = dateutil.parser.parse(next(git_log_iter))
commit_hash = next(git_log_iter)
body = list(iter(git_log_iter.next, '---'))
if tag:
out_file.write('\n=== %s (%s) ===\n\n' % (tag.group(1), date.strftime('%Y.%m.%d')))
out_file.write(' * (%s) %s\n' % (commit_hash, '[[BR]]\n '.join(body)))
|
<commit_before><commit_msg>Add a helper script to generate a changelog from git log<commit_after># Writes a changelog in trac WikiFormatting based on a git log
from __future__ import unicode_literals, division, absolute_import
import codecs
from itertools import ifilter
import os
import re
import subprocess
import sys
import dateutil.parser
out_path = 'ChangeLog'
if len(sys.argv) > 1:
dir_name = os.path.dirname(sys.argv[1])
if dir_name and not os.path.isdir(dir_name):
print 'Output dir doesn\'t exist: %s' % sys.argv[1]
sys.exit(1)
out_path = sys.argv[1]
# 1.0.3280 was last revision on svn
git_log_output = subprocess.check_output(['git', 'log', '--pretty=%n---%n.%d%n%ci%n%h%n%s%n%-b%n---%n', '--topo-order', 'refs/tags/1.0.3280..HEAD'])
git_log_iter = ifilter(None, git_log_output.decode('utf-8').splitlines())
with codecs.open(out_path, 'w', encoding='utf-8') as out_file:
for line in git_log_iter:
assert line == '---'
tag = re.search('tag: ([\d.]+)', next(git_log_iter))
date = dateutil.parser.parse(next(git_log_iter))
commit_hash = next(git_log_iter)
body = list(iter(git_log_iter.next, '---'))
if tag:
out_file.write('\n=== %s (%s) ===\n\n' % (tag.group(1), date.strftime('%Y.%m.%d')))
out_file.write(' * (%s) %s\n' % (commit_hash, '[[BR]]\n '.join(body)))
|
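The --pretty format above emits "---"-delimited blocks; a self-contained Python 3 sketch of the same parsing loop on a hypothetical sample block (filter replaces itertools.ifilter, and next() replaces the .next method):
import re

# Hypothetical sample in the script's --pretty layout:
# ---, decoration line, ISO date, short hash, subject/body lines, ---
sample = """---
. (tag: 1.1.2)
2013-05-01 12:00:00 +0300
abc1234
Fix the frobnicator
---"""

log_iter = filter(None, sample.splitlines())
for line in log_iter:
    assert line == '---'
    tag = re.search(r'tag: ([\d.]+)', next(log_iter))
    date = next(log_iter)  # the real script runs dateutil.parser.parse here
    commit_hash = next(log_iter)
    body = list(iter(lambda: next(log_iter), '---'))
    if tag:
        print(tag.group(1), date, commit_hash, body)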
|
c6e15f8e66fe542c459ac75f1ef6db080249e9d0
|
py/test/selenium/webdriver/chrome/conftest.py
|
py/test/selenium/webdriver/chrome/conftest.py
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
def pytest_generate_tests(metafunc):
if 'driver' in metafunc.fixturenames and metafunc.config.option.drivers:
metafunc.parametrize('driver', metafunc.config.option.drivers, indirect=True)
|
Allow driver fixtures to be used in Chrome only tests
|
[py] Allow driver fixtures to be used in Chrome only tests
|
Python
|
apache-2.0
|
titusfortner/selenium,HtmlUnit/selenium,Ardesco/selenium,joshmgrant/selenium,HtmlUnit/selenium,titusfortner/selenium,SeleniumHQ/selenium,valfirst/selenium,HtmlUnit/selenium,HtmlUnit/selenium,valfirst/selenium,SeleniumHQ/selenium,Ardesco/selenium,HtmlUnit/selenium,HtmlUnit/selenium,Ardesco/selenium,Ardesco/selenium,SeleniumHQ/selenium,HtmlUnit/selenium,titusfortner/selenium,HtmlUnit/selenium,titusfortner/selenium,joshmgrant/selenium,joshmgrant/selenium,titusfortner/selenium,joshmgrant/selenium,valfirst/selenium,valfirst/selenium,joshmgrant/selenium,titusfortner/selenium,titusfortner/selenium,joshmgrant/selenium,joshmgrant/selenium,joshmgrant/selenium,valfirst/selenium,SeleniumHQ/selenium,Ardesco/selenium,joshmgrant/selenium,titusfortner/selenium,HtmlUnit/selenium,SeleniumHQ/selenium,titusfortner/selenium,joshmgrant/selenium,valfirst/selenium,SeleniumHQ/selenium,SeleniumHQ/selenium,valfirst/selenium,joshmgrant/selenium,SeleniumHQ/selenium,valfirst/selenium,SeleniumHQ/selenium,valfirst/selenium,HtmlUnit/selenium,valfirst/selenium,titusfortner/selenium,Ardesco/selenium,titusfortner/selenium,valfirst/selenium,SeleniumHQ/selenium,Ardesco/selenium,Ardesco/selenium,SeleniumHQ/selenium,Ardesco/selenium
|
[py] Allow driver fixtures to be used in Chrome only tests
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
def pytest_generate_tests(metafunc):
if 'driver' in metafunc.fixturenames and metafunc.config.option.drivers:
metafunc.parametrize('driver', metafunc.config.option.drivers, indirect=True)
|
<commit_before><commit_msg>[py] Allow driver fixtures to be used in Chrome only tests<commit_after>
|
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
def pytest_generate_tests(metafunc):
if 'driver' in metafunc.fixturenames and metafunc.config.option.drivers:
metafunc.parametrize('driver', metafunc.config.option.drivers, indirect=True)
|
[py] Allow driver fixtures to be used in Chrome only tests# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
def pytest_generate_tests(metafunc):
if 'driver' in metafunc.fixturenames and metafunc.config.option.drivers:
metafunc.parametrize('driver', metafunc.config.option.drivers, indirect=True)
|
<commit_before><commit_msg>[py] Allow driver fixtures to be used in Chrome only tests<commit_after># Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
def pytest_generate_tests(metafunc):
if 'driver' in metafunc.fixturenames and metafunc.config.option.drivers:
metafunc.parametrize('driver', metafunc.config.option.drivers, indirect=True)
|
|
d07f0ab7f437809a2811f0f1fdab396cbcac740f
|
python/opencv/opencv_2/display_image.py
|
python/opencv/opencv_2/display_image.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Display image: display an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
     O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
    # Parse the program options (get the path of the image file to display)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to display", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads image in grayscale mode
# - cv.IMREAD_UNCHANGED loads image as such including alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img = cv.imread(infile_str, imread_flags)
window_name = 'display_image_snippet'
cv.imshow(window_name, img) # Display the image "img" in a window
print("Press any key to quit.")
wait_time_ms = 0 # The time to wait for a keyboard event (0 = wait indefinitely)
cv.waitKey(wait_time_ms)
cv.destroyAllWindows()
if __name__ == '__main__':
main()
|
Add a snippet (Python OpenCV).
|
Add a snippet (Python OpenCV).
|
Python
|
mit
|
jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets
|
Add a snippet (Python OpenCV).
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Display image: display an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
     O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
    # Parse the program options (get the path of the image file to display)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to display", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads image in grayscale mode
# - cv.IMREAD_UNCHANGED loads image as such including alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img = cv.imread(infile_str, imread_flags)
window_name = 'display_image_snippet'
cv.imshow(window_name, img) # Display the image "img" in a window
print("Press any key to quit.")
wait_time_ms = 0 # The time to wait for a keyboard event (0 = wait indefinitely)
cv.waitKey(wait_time_ms)
cv.destroyAllWindows()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a snippet (Python OpenCV).<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Display image: display an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
     O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
    # Parse the program options (get the path of the image file to display)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to display", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads image in grayscale mode
# - cv.IMREAD_UNCHANGED loads image as such including alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img = cv.imread(infile_str, imread_flags)
window_name = 'display_image_snippet'
cv.imshow(window_name, img) # Display the image "img" in a window
print("Press any key to quit.")
wait_time_ms = 0 # The time to wait for a keyboard event (0 = wait indefinitely)
cv.waitKey(wait_time_ms)
cv.destroyAllWindows()
if __name__ == '__main__':
main()
|
Add a snippet (Python OpenCV).#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Display image: display an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
     O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
    # Parse the program options (get the path of the image file to display)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to display", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads image in grayscale mode
# - cv.IMREAD_UNCHANGED loads image as such including alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img = cv.imread(infile_str, imread_flags)
window_name = 'display_image_snippet'
cv.imshow(window_name, img) # Display the image "img" in a window
print("Press any key to quit.")
wait_time_ms = 0 # The time to wait for a keyboard event (0 = wait indefinitely)
cv.waitKey(wait_time_ms)
cv.destroyAllWindows()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a snippet (Python OpenCV).<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)
"""
OpenCV - Display image: display an image given in arguments
Required: opencv library (Debian: aptitude install python-opencv)
See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#display-an-image
     O'Reilly's book "Learning OpenCV" (first edition) p.17 (OpenCV for C)
"""
from __future__ import print_function
import cv2 as cv
import argparse
def main():
    # Parse the program options (get the path of the image file to display)
parser = argparse.ArgumentParser(description='An opencv snippet.')
parser.add_argument("--infile", "-i", help="The picture file to display", required=True, metavar="FILE")
args = parser.parse_args()
infile_str = args.infile
# OpenCV
# imread_flags is a flag which specifies the way image should be read:
# - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
# - cv.IMREAD_GRAYSCALE loads image in grayscale mode
# - cv.IMREAD_UNCHANGED loads image as such including alpha channel
imread_flags = cv.IMREAD_UNCHANGED
img = cv.imread(infile_str, imread_flags)
window_name = 'display_image_snippet'
cv.imshow(window_name, img) # Display the image "img" in a window
print("Press any key to quit.")
wait_time_ms = 0 # The time to wait for a keyboard event (0 = wait indefinitely)
cv.waitKey(wait_time_ms)
cv.destroyAllWindows()
if __name__ == '__main__':
main()
|
|
13c3190c6e89c2037c9b2bcce8a89dd43a58b53f
|
rummage/lib/gui/controls/time_picker.py
|
rummage/lib/gui/controls/time_picker.py
|
"""Custom time picker that allows us control of the control's color."""
from wx.lib.masked import TimeCtrl
from ... util import rgba
import wx
class TimePickerCtrl(TimeCtrl):
"""Time picker that we can force proper colors on."""
def __init__(self, parent, *args, **kwargs):
"""
Initialize.
Create a temporary text control so we can get proper
background and foreground colors.
"""
ctrl = wx.TextCtrl(parent)
self._fg = ctrl.GetForegroundColour().GetRGB()
self._bg = ctrl.GetBackgroundColour().GetRGB()
bg = rgba.RGBA(0xFF0000)
bg.blend(rgba.RGBA(ctrl.GetBackgroundColour().Get()), 60)
self._error_bg = wx.Colour(*bg.get_rgb()).GetRGB()
ctrl.Destroy()
super().__init__(parent, *args, **kwargs)
def SetParameters(self, **kwargs):
"""Force the colors we want."""
if 'oob_color' in kwargs:
del kwargs['oob_color']
maskededit_kwargs = super().SetParameters(**kwargs)
maskededit_kwargs['emptyBackgroundColour'] = wx.Colour(self._bg)
maskededit_kwargs['validBackgroundColour'] = wx.Colour(self._bg)
maskededit_kwargs['invalidBackgroundColour'] = wx.Colour(self._error_bg)
maskededit_kwargs['foregroundColour'] = wx.Colour(self._fg)
maskededit_kwargs['signedForegroundColour'] = wx.Colour(self._fg)
return maskededit_kwargs
|
Add custom time picker control
|
Add custom time picker control
|
Python
|
mit
|
facelessuser/Rummage,facelessuser/Rummage,facelessuser/Rummage
|
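A minimal usage sketch, assuming the rummage package is importable so the relative import resolves; the host frame and initial value below are illustrative, and the value keyword is TimeCtrl's standard initial-time parameter:

import wx
from rummage.lib.gui.controls.time_picker import TimePickerCtrl

app = wx.App(False)
frame = wx.Frame(None, title='Time picker demo')   # hypothetical host window
picker = TimePickerCtrl(frame, value='12:00:00')   # the control applies the colors captured in __init__
frame.Show()
app.MainLoop()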
|
22102598e98371aed70866d8d8b48d96fb889708
|
liveplotkivy.py
|
liveplotkivy.py
|
'''
Kivy requires some additional library installation (the kivy package plus the matplotlib garden backend).
'''
import matplotlib
matplotlib.use('module://kivy.garden.matplotlib.backend_kivy')
import matplotlib.pyplot as plt
import kivy
kivy.require('1.10.0') # replace with your current kivy version !
from kivy.app import App
from kivy.uix.button import Button
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.popup import Popup
from kivy.uix.label import Label
from kivy.clock import Clock
from kivy.core.window import Window
from kivy.garden.matplotlib.backend_kivyagg import FigureCanvas
import Recorder as rcd
class RootWidget(BoxLayout):
def __init__(self, rec = None, **kwargs):
super(RootWidget, self).__init__(**kwargs)
self.padding = 10
self.orientation = 'vertical'
self.rec = rec
fig = plt.figure()
ax = fig.add_subplot(111)
ax.set_ylim(-5e4,5e4)
self.line = ax.plot(range(len(self.rec.signal_data)),
self.rec.signal_data)[0]
self.canvas_widget = FigureCanvas(fig)
self.add_widget(self.canvas_widget)
btn = Button(text='Switch')
self.add_widget(btn)
btn.size_hint_y = 0.1
event = Clock.schedule_interval(self.update_line, 1 / 60.)
def update_line(self,dt):
self.line.set_ydata(self.rec.signal_data)
self.canvas_widget.draw()
class MyApp(App):
def __init__(self):
super().__init__()
self.rec = rcd.Recorder(1,44100,1024,'Line (U24XL with SPDIF I/O)')
self.rec.stream_init(playback = True)
self.playing = True
        self.rootwidget = RootWidget(rec=self.rec)
def build(self):
Window.bind(on_request_close=self.on_request_close)
return self.rootwidget
def on_request_close(self, *args,**kwargs):
self.exit_popup(title='Exit', text='Are you sure?')
return False
# Unable to implement an exit confirmation
def exit_popup(self,title,text):
vbox = BoxLayout(orientation='vertical')
vbox.add_widget(Label(text=text))
mybutton = Button(text='OK', size_hint=(0.5,1))
vbox.add_widget(mybutton)
popup = Popup(title=title, content=vbox, size_hint=(None, None), size= (600, 300))
mybutton.bind(on_release = self.stop)
popup.open()
def on_stop(self):
self.rec.stream_stop()
return False
if __name__ == '__main__':
MyApp().run()
|
Add Kivy version of live audio stream
|
Add Kivy version of live audio stream
|
Python
|
bsd-3-clause
|
torebutlin/cued_datalogger
|
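Setup note: the backend line at the top presupposes the Kivy garden matplotlib package (historically installed with the garden tool, e.g. garden install matplotlib), and Recorder is a module local to this repository rather than a published dependency.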
|
a5e05c411c229b128f56eddd027efc05230778f9
|
06-code-golf/tf-06-bm.py
|
06-code-golf/tf-06-bm.py
|
print (reduce(lambda string, tup: string + tup[0] + ' - ' + str(tup[1]) + '\n', sorted( filter(lambda tup: tup[0] not in open('../stop_words.txt').read().lower().split(','), reduce(lambda word_dict, word: word_dict if (word_dict.__setitem__(word, word_dict.get(word, 0) + 1) if True else None) else word_dict, filter(lambda word: len(word) > 1, (''.join(map(lambda letter: ' ' if ord(letter) not in list(range(ord('a'), ord('z') + 1)) else letter, open('../pride-and-prejudice.txt').read().lower()))).split()), {}).iteritems()), key=lambda tup: tup[1], reverse=True)[0:25], '')) # hole in one?
|
Add a single-line code golf example
|
Add a single-line code golf example
|
Python
|
mit
|
kranthikumar/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,crista/exercises-in-programming-style,crista/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,crista/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,aaron-goshine/exercises-in-programming-style,crista/exercises-in-programming-style,kranthikumar/exercises-in-programming-style,kranthikumar/exercises-in-programming-style
|
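For readers untangling the one-liner, a rough Python 3 equivalent (a sketch, not part of the exercise; the original relies on Python 2's reduce and iteritems):

import re
from collections import Counter

stop_words = set(open('../stop_words.txt').read().lower().split(','))
text = open('../pride-and-prejudice.txt').read().lower()
# keep runs of a-z, drop one-letter tokens and stop words, count the rest
words = [w for w in re.findall('[a-z]+', text)
         if len(w) > 1 and w not in stop_words]
for word, count in Counter(words).most_common(25):
    print(word, '-', count)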
|
6fed44e26539e607e60acdc39aa73a69e769ec5f
|
tests/test_repo_not_found.py
|
tests/test_repo_not_found.py
|
# -*- coding: utf-8 -*-
import pytest
from cookiecutter import main, exceptions
def test_should_raise_error_if_repo_does_not_exist():
with pytest.raises(exceptions.RepositoryNotFound):
main.cookiecutter('definitely-not-a-valid-repo-dir')
|
Implement a test for the RepositoryNotFound exception
|
Implement a test for the RepositoryNotFound exception
|
Python
|
bsd-3-clause
|
Springerle/cookiecutter,stevepiercy/cookiecutter,willingc/cookiecutter,luzfcb/cookiecutter,luzfcb/cookiecutter,terryjbates/cookiecutter,Springerle/cookiecutter,michaeljoseph/cookiecutter,hackebrot/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,hackebrot/cookiecutter,audreyr/cookiecutter,pjbull/cookiecutter,michaeljoseph/cookiecutter,terryjbates/cookiecutter,dajose/cookiecutter,dajose/cookiecutter,pjbull/cookiecutter,stevepiercy/cookiecutter
|
|
830a17eca3550c3f4d9f4878d90405b288c10a42
|
official/utils/misc/callstack_sampler.py
|
official/utils/misc/callstack_sampler.py
|
"""A simple Python callstack sampler."""
import contextlib
import datetime
import signal
import traceback
class CallstackSampler(object):
"""A simple signal-based Python callstack sampler.
"""
def __init__(self, interval=None):
self.stacks = []
self.interval = 0.001 if interval is None else interval
def _sample(self, signum, frame):
"""Samples the current stack."""
del signum
stack = traceback.extract_stack(frame)
formatted_stack = []
formatted_stack.append(datetime.datetime.utcnow())
for filename, lineno, function_name, text in stack:
formatted_frame = '{}:{}({})({})'.format(filename, lineno, function_name,
text)
formatted_stack.append(formatted_frame)
self.stacks.append(formatted_stack)
signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0)
@contextlib.contextmanager
def profile(self):
signal.signal(signal.SIGVTALRM, self._sample)
signal.setitimer(signal.ITIMER_VIRTUAL, self.interval, 0)
try:
yield
finally:
signal.setitimer(signal.ITIMER_VIRTUAL, 0)
def save(self, fname):
with open(fname, 'w') as f:
for s in self.stacks:
for l in s:
f.write('%s\n' % l)
f.write('\n')
@contextlib.contextmanager
def callstack_sampling(filename, interval=None):
"""Periodically samples the Python callstack.
Args:
filename: the filename
interval: the sampling interval, in seconds. Defaults to 0.001.
Yields:
nothing
"""
sampler = CallstackSampler(interval=interval)
with sampler.profile():
yield
sampler.save(filename)
|
Add a simple signal-based Python callstack sampler for debugging
|
Add a simple signal-based Python callstack sampler for debugging
|
Python
|
apache-2.0
|
alexgorban/models,tombstone/models,tombstone/models,alexgorban/models,tombstone/models,alexgorban/models,tombstone/models,tombstone/models,tombstone/models,alexgorban/models,alexgorban/models
|
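A short usage sketch of the context manager defined above; the output path and the profiled call are placeholders:

from official.utils.misc.callstack_sampler import callstack_sampling

with callstack_sampling('/tmp/stacks.txt', interval=0.01):
    run_training_step()  # placeholder for whatever code is being sampled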
|
ca056b16d5a39d5c8e95d8f2fe4d8678a0dfea0c
|
tests/test_corruption.py
|
tests/test_corruption.py
|
from io import BytesIO
from unittest import TestCase
import bigjson
MISSING_OPEN_QUOTE_JSON_FILE = b"""
{
"object": {
"x": y"
}
}
"""
# rb-literal keeps the \q byte pair literal without Python's invalid-escape warning
CORRUPT_BACKSLASH_ENCODING_JSON_FILE = rb"""
{
"string": "\qblah"
}
"""
MISSING_DIGIT_AFTER_DOT_JSON_FILE = b"""
{
"number": 14.
}
"""
class TestCorruption(TestCase):
def test_missing_open_quote(self):
file = BytesIO(MISSING_OPEN_QUOTE_JSON_FILE)
data = bigjson.load(file)
with self.assertRaises(Exception) as e:
_ = len(data)
self.assertEqual(e.exception.args[0], "Unexpected bytes! Value 'y' Position 32")
def test_corrupt_backslash_encoding(self):
file = BytesIO(CORRUPT_BACKSLASH_ENCODING_JSON_FILE)
data = bigjson.load(file)
with self.assertRaises(Exception) as e:
_ = len(data)
self.assertEqual(e.exception.args[0], "Unexpected \\q in backslash encoding! Position 19")
def test_missing_digit_after_dot(self):
file = BytesIO(MISSING_DIGIT_AFTER_DOT_JSON_FILE)
data = bigjson.load(file)
with self.assertRaises(Exception) as e:
_ = len(data)
self.assertEqual(e.exception.args[0], "Expected digit after dot! Position 21")
|
Add tests for improved exception messages
|
Add tests for improved exception messages
|
Python
|
mit
|
henu/bigjson
|
|
2c41a6cf7c591de5a9e7a990ee9904c3949c3b2e
|
contacts/management/commands/populate_test_data.py
|
contacts/management/commands/populate_test_data.py
|
from django.core.management.base import BaseCommand
from contacts.models import Book, Contact, ContactField
class Command(BaseCommand):
def handle(self, *args, **kwargs):
book = Book.objects.get(id=1)
for n in range(40):
contact = Contact.objects.create(
book=book,
name='Test {}'.format(n),
)
for m in range(10):
ContactField.objects.create(
contact=contact,
kind='email',
label='Test {}'.format(m),
value='pjj+{}@pjj.pjj'.format(m),
)
|
Add a command to populate test data.
|
Add a command to populate test data.
|
Python
|
mit
|
phildini/logtacts,phildini/logtacts,phildini/logtacts,phildini/logtacts,phildini/logtacts
|
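Given the management/commands/ location, Django exposes this as python manage.py populate_test_data. Note that it assumes a Book with id=1 already exists; Book.objects.get(id=1) raises DoesNotExist otherwise.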
|
a04db731db1738b651a2cb9b5dacbe24e02e02ca
|
tests/test_server.py
|
tests/test_server.py
|
"""
The tests provided in this module make sure that the server
instance behaves as expected.
"""
import pytest
from saltyrtc import server
class TestServer:
@pytest.mark.asyncio
def test_repeated_permanent_keys(self, server_permanent_keys):
"""
Ensure the server does not accept repeated keys.
"""
keys = server_permanent_keys + [server_permanent_keys[1]]
with pytest.raises(server.ServerKeyError) as exc_info:
yield from server.serve(None, keys)
assert 'Repeated permanent keys' in str(exc_info.value)
|
Add test for repeated permanent keys on a Server class instance
|
Add test for repeated permanent keys on a Server class instance
|
Python
|
mit
|
saltyrtc/saltyrtc-server-python,saltyrtc/saltyrtc-server-python
|
|
f0d1f7d5ff57b3359aa26f17297e6de9ff533eb1
|
examples/example_spline.py
|
examples/example_spline.py
|
"""
This example is meant for development purposes, providing an easy way to
test the functioning of Pastas during development.
"""
import pandas as pd
import pastas as ps
ps.set_log_level("ERROR")
# read observations and create the time series model
obs = pd.read_csv("data/head_nb1.csv", index_col=0, parse_dates=True,
squeeze=True)
# Create the time series model
# read weather data
rain = pd.read_csv("data/rain_nb1.csv", index_col=0, parse_dates=True,
squeeze=True)
evap = pd.read_csv("data/evap_nb1.csv", index_col=0, parse_dates=True,
squeeze=True)
# Solve with a Gamma response function
ml = ps.Model(obs, name="Gamma")
sm = ps.RechargeModel(prec=rain, evap=evap, rfunc=ps.Gamma,
name='recharge')
ml.add_stressmodel(sm)
ml.solve(noise=False)
# Solve with a Spline response function
ml2 = ps.Model(obs, name="Spline")
sm2 = ps.RechargeModel(prec=rain, evap=evap, rfunc=ps.Spline,
name='recharge')
ml2.add_stressmodel(sm2)
ml2.solve(noise=False)
# Compare both models
ps.plots.compare([ml, ml2])
|
Add an example to compare Gamma and Spline
|
Add an example to compare Gamma and Spline
|
Python
|
mit
|
pastas/pasta,pastas/pastas
|
|
374dc84359bb41bbc94116b3a7662255fa5d147a
|
py/super-pow.py
|
py/super-pow.py
|
class Solution(object):
def superPow(self, a, b):
"""
:type a: int
:type b: List[int]
:rtype: int
"""
MODULI = 1337
if a == 0:
return 0
ans = 1
for n in b:
ans = pow(ans, 10, MODULI)
ans = (ans * pow(a, n, MODULI)) % MODULI
return ans
|
Add py solution for 372. Super Pow
|
Add py solution for 372. Super Pow
372. Super Pow: https://leetcode.com/problems/super-pow/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
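The digit loop works because a**(10*x + y) == (a**x)**10 * a**y, applied modulo 1337 one digit at a time. A quick sanity check against Python's built-in three-argument pow:

s = Solution()
assert s.superPow(2, [1, 0]) == pow(2, 10, 1337)      # 2**10 % 1337 == 1024
assert s.superPow(3, [3, 2, 1]) == pow(3, 321, 1337)  # multi-digit exponent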
|
a69726510284bbb1b3e53de45024c0920124ecca
|
src/ggrc_basic_permissions/migrations/versions/20131206192611_1f865f61312_remove_delete_progra.py
|
src/ggrc_basic_permissions/migrations/versions/20131206192611_1f865f61312_remove_delete_progra.py
|
"""Remove delete program permission from ProgramEditor.
Revision ID: 1f865f61312
Revises: eab1d38baee
Create Date: 2013-12-06 19:26:11.875923
"""
# revision identifiers, used by Alembic.
revision = '1f865f61312'
down_revision = 'eab1d38baee'
import sqlalchemy as sa
from alembic import op
from datetime import datetime
from sqlalchemy.sql import table, column, select
import json
roles_table = table('roles',
column('id', sa.Integer),
column('name', sa.String),
column('permissions_json', sa.String)
)
def get_role_permissions(role):
connection = op.get_bind()
role = connection.execute(
select([roles_table.c.permissions_json])\
.where(roles_table.c.name == role)).fetchone()
return json.loads(role.permissions_json)
def update_role_permissions(role, permissions):
op.execute(roles_table\
.update()\
.values(permissions_json = json.dumps(permissions))\
.where(roles_table.c.name == role))
def upgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].remove('Program')
update_role_permissions('ProgramEditor', permissions)
def downgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].append('Program')
update_role_permissions('ProgramEditor', permissions)
|
Remove delete permission for Program resources from the ProgramEditor role.
|
Remove delete permission for Program resources from the ProgramEditor
role.
|
Python
|
apache-2.0
|
j0gurt/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,uskudnik/ggrc-core,josthkko/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,uskudnik/ggrc-core,AleksNeStu/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,hasanalom/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,vladan-m/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core
|
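For orientation, permissions_json stores a JSON object mapping actions to lists of resource types. A hypothetical ProgramEditor value (only the 'delete' key is touched by this migration; the other keys are illustrative):

permissions = {
    'create': ['Program'],  # illustrative
    'read': ['Program'],    # illustrative
    'update': ['Program'],  # illustrative
    'delete': ['Program'],  # upgrade() removes 'Program' from this list
}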
Remove delete permission for Program resources from the ProgramEditor
role.
|
"""Remove delete program permission from ProgramEditor.
Revision ID: 1f865f61312
Revises: eab1d38baee
Create Date: 2013-12-06 19:26:11.875923
"""
# revision identifiers, used by Alembic.
revision = '1f865f61312'
down_revision = 'eab1d38baee'
import sqlalchemy as sa
from alembic import op
from datetime import datetime
from sqlalchemy.sql import table, column, select
import json
roles_table = table('roles',
column('id', sa.Integer),
column('name', sa.String),
column('permissions_json', sa.String)
)
def get_role_permissions(role):
connection = op.get_bind()
role = connection.execute(
select([roles_table.c.permissions_json])\
.where(roles_table.c.name == role)).fetchone()
return json.loads(role.permissions_json)
def update_role_permissions(role, permissions):
op.execute(roles_table\
.update()\
.values(permissions_json = json.dumps(permissions))\
.where(roles_table.c.name == role))
def upgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].remove('Program')
update_role_permissions('ProgramEditor', permissions)
def downgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].append('Program')
update_role_permissions('ProgramEditor', permissions)
|
<commit_before><commit_msg>Remove delete permission for Program resources from the ProgramEditor
role.<commit_after>
|
"""Remove delete program permission from ProgramEditor.
Revision ID: 1f865f61312
Revises: eab1d38baee
Create Date: 2013-12-06 19:26:11.875923
"""
# revision identifiers, used by Alembic.
revision = '1f865f61312'
down_revision = 'eab1d38baee'
import sqlalchemy as sa
from alembic import op
from datetime import datetime
from sqlalchemy.sql import table, column, select
import json
roles_table = table('roles',
column('id', sa.Integer),
column('name', sa.String),
column('permissions_json', sa.String)
)
def get_role_permissions(role):
connection = op.get_bind()
role = connection.execute(
select([roles_table.c.permissions_json])\
.where(roles_table.c.name == role)).fetchone()
return json.loads(role.permissions_json)
def update_role_permissions(role, permissions):
op.execute(roles_table\
.update()\
.values(permissions_json = json.dumps(permissions))\
.where(roles_table.c.name == role))
def upgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].remove('Program')
update_role_permissions('ProgramEditor', permissions)
def downgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].append('Program')
update_role_permissions('ProgramEditor', permissions)
|
Remove delete permission for Program resources from the ProgramEditor
role.
"""Remove delete program permission from ProgramEditor.
Revision ID: 1f865f61312
Revises: eab1d38baee
Create Date: 2013-12-06 19:26:11.875923
"""
# revision identifiers, used by Alembic.
revision = '1f865f61312'
down_revision = 'eab1d38baee'
import sqlalchemy as sa
from alembic import op
from datetime import datetime
from sqlalchemy.sql import table, column, select
import json
roles_table = table('roles',
column('id', sa.Integer),
column('name', sa.String),
column('permissions_json', sa.String)
)
def get_role_permissions(role):
connection = op.get_bind()
role = connection.execute(
select([roles_table.c.permissions_json])\
.where(roles_table.c.name == role)).fetchone()
return json.loads(role.permissions_json)
def update_role_permissions(role, permissions):
op.execute(roles_table\
.update()\
.values(permissions_json = json.dumps(permissions))\
.where(roles_table.c.name == role))
def upgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].remove('Program')
update_role_permissions('ProgramEditor', permissions)
def downgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].append('Program')
update_role_permissions('ProgramEditor', permissions)
|
<commit_before><commit_msg>Remove delete permission for Program resources from the ProgramEditor
role.<commit_after>
"""Remove delete program permission from ProgramEditor.
Revision ID: 1f865f61312
Revises: eab1d38baee
Create Date: 2013-12-06 19:26:11.875923
"""
# revision identifiers, used by Alembic.
revision = '1f865f61312'
down_revision = 'eab1d38baee'
import sqlalchemy as sa
from alembic import op
from datetime import datetime
from sqlalchemy.sql import table, column, select
import json
roles_table = table('roles',
column('id', sa.Integer),
column('name', sa.String),
column('permissions_json', sa.String)
)
def get_role_permissions(role):
connection = op.get_bind()
role = connection.execute(
select([roles_table.c.permissions_json])\
.where(roles_table.c.name == role)).fetchone()
return json.loads(role.permissions_json)
def update_role_permissions(role, permissions):
op.execute(roles_table\
.update()\
.values(permissions_json = json.dumps(permissions))\
.where(roles_table.c.name == role))
def upgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].remove('Program')
update_role_permissions('ProgramEditor', permissions)
def downgrade():
permissions = get_role_permissions('ProgramEditor')
permissions['delete'].append('Program')
update_role_permissions('ProgramEditor', permissions)
|
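Editor's note: a minimal standalone sketch of the list surgery that upgrade()/downgrade() above perform on the role's permissions_json blob; the 'SomeOtherType' entry is an illustrative placeholder, not a real GGRC resource type.
import json
permissions = {'delete': ['Program', 'SomeOtherType']}  # illustrative contents
permissions['delete'].remove('Program')    # what upgrade() does to the parsed JSON
print(json.dumps(permissions))             # {"delete": ["SomeOtherType"]}
permissions['delete'].append('Program')    # what downgrade() restores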
|
3d83caa99fd861290ac21729e8ef1af1cb7a17bf
|
code/get_licenses.py
|
code/get_licenses.py
|
#!/usr/bin/python
import requests
import json
import ast
def parse_dataset_metadata(dataset):
if 'rights' in dataset.keys():
rights = dataset['rights']
else:
rights = 'not supplied'
return {'dataset_key': dataset['key'], 'rights': rights}
def get_gbif_datasets(limit, offset):
params = {'limit': limit, 'offset': offset}
r = requests.get('http://api.gbif.org/v0.9/dataset/', params=params)
request_result = r.json()['results']
return request_result
results = []
more_results_to_find = True
offset = 0
limit = 20
while more_results_to_find:
datasets = get_gbif_datasets(limit, offset)
    for dataset in datasets:
        metadata = parse_dataset_metadata(dataset)
        results.append(metadata)  # keep each record so the results list is actually used
        print metadata
offset += 20
if len(datasets) == 0:
more_results_to_find = False
|
Add script to fetch licenses
|
Add script to fetch licenses
|
Python
|
mit
|
Datafable/gbif-data-licenses,Datafable/gbif-data-licenses,Datafable/gbif-data-licenses
|
Add script to fetch licenses
|
#!/usr/bin/python
import requests
import json
import ast
def parse_dataset_metadata(dataset):
if 'rights' in dataset.keys():
rights = dataset['rights']
else:
rights = 'not supplied'
return {'dataset_key': dataset['key'], 'rights': rights}
def get_gbif_datasets(limit, offset):
params = {'limit': limit, 'offset': offset}
r = requests.get('http://api.gbif.org/v0.9/dataset/', params=params)
request_result = r.json()['results']
return request_result
results = []
more_results_to_find = True
offset = 0
limit = 20
while more_results_to_find:
datasets = get_gbif_datasets(limit, offset)
    for dataset in datasets:
        metadata = parse_dataset_metadata(dataset)
        results.append(metadata)  # keep each record so the results list is actually used
        print metadata
offset += 20
if len(datasets) == 0:
more_results_to_find = False
|
<commit_before><commit_msg>Add script to fetch licenses<commit_after>
|
#!/usr/bin/python
import requests
import json
import ast
def parse_dataset_metadata(dataset):
if 'rights' in dataset.keys():
rights = dataset['rights']
else:
rights = 'not supplied'
return {'dataset_key': dataset['key'], 'rights': rights}
def get_gbif_datasets(limit, offset):
params = {'limit': limit, 'offset': offset}
r = requests.get('http://api.gbif.org/v0.9/dataset/', params=params)
request_result = r.json()['results']
return request_result
results = []
more_results_to_find = True
offset = 0
limit = 20
while more_results_to_find:
datasets = get_gbif_datasets(limit, offset)
    for dataset in datasets:
        metadata = parse_dataset_metadata(dataset)
        results.append(metadata)  # keep each record so the results list is actually used
        print metadata
offset += 20
if len(datasets) == 0:
more_results_to_find = False
|
Add script to fetch licenses#!/usr/bin/python
import requests
import json
import ast
def parse_dataset_metadata(dataset):
if 'rights' in dataset.keys():
rights = dataset['rights']
else:
rights = 'not supplied'
return {'dataset_key': dataset['key'], 'rights': rights}
def get_gbif_datasets(limit, offset):
params = {'limit': limit, 'offset': offset}
r = requests.get('http://api.gbif.org/v0.9/dataset/', params=params)
request_result = r.json()['results']
return request_result
results = []
more_results_to_find = True
offset = 0
limit = 20
while more_results_to_find:
datasets = get_gbif_datasets(limit, offset)
    for dataset in datasets:
        metadata = parse_dataset_metadata(dataset)
        results.append(metadata)  # keep each record so the results list is actually used
        print metadata
offset += 20
if len(datasets) == 0:
more_results_to_find = False
|
<commit_before><commit_msg>Add script to fetch licenses<commit_after>#!/usr/bin/python
import requests
import json
import ast
def parse_dataset_metadata(dataset):
if 'rights' in dataset.keys():
rights = dataset['rights']
else:
rights = 'not supplied'
return {'dataset_key': dataset['key'], 'rights': rights}
def get_gbif_datasets(limit, offset):
params = {'limit': limit, 'offset': offset}
r = requests.get('http://api.gbif.org/v0.9/dataset/', params=params)
request_result = r.json()['results']
return request_result
results = []
more_results_to_find = True
offset = 0
limit = 20
while more_results_to_find:
datasets = get_gbif_datasets(limit, offset)
    for dataset in datasets:
        metadata = parse_dataset_metadata(dataset)
        results.append(metadata)  # keep each record so the results list is actually used
        print metadata
offset += 20
if len(datasets) == 0:
more_results_to_find = False
|
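Editor's note: the while loop above is classic limit/offset paging against the GBIF dataset endpoint; an equivalent generator form (a sketch reusing get_gbif_datasets from this script, not part of the original commit) makes the termination condition explicit.
def iter_gbif_datasets(limit=20):
    offset = 0
    while True:
        page = get_gbif_datasets(limit, offset)
        if not page:  # an empty page means the result set is exhausted
            return
        for dataset in page:
            yield dataset
        offset += limit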
|
ab48df1df241a17c01730a368178abfbe4d94bcc
|
qa/rpc-tests/qtum-call-empty-contract.py
|
qa/rpc-tests/qtum-call-empty-contract.py
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import sys
class QtumCallContractStateNotRevertedTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
self.is_network_split = False
def run_test(self):
connect_nodes_bi(self.nodes, 0, 1)
self.nodes[0].generate(600)
self.sync_all()
self.nodes[1].generate(600)
self.sync_all()
contract_address = self.nodes[0].createcontract("00")['address']
self.nodes[0].generate(1)
self.sync_all()
self.nodes[0].callcontract(contract_address, "00")
self.nodes[1].createcontract("00")
self.nodes[1].generate(1)
time.sleep(1)
assert_equal(self.nodes[0].getblockcount(), self.nodes[1].getblockcount())
assert_equal(self.nodes[0].listcontracts(), self.nodes[1].listcontracts())
if __name__ == '__main__':
QtumCallContractStateNotRevertedTest().main()
|
Add call empty contract python test
|
Add call empty contract python test
|
Python
|
mit
|
qtumproject/qtum,qtumproject/qtum,qtumproject/qtum,qtumproject/qtum,qtumproject/qtum,qtumproject/qtum,qtumproject/qtum
|
Add call empty contract python test
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import sys
class QtumCallContractStateNotRevertedTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
self.is_network_split = False
def run_test(self):
connect_nodes_bi(self.nodes, 0, 1)
self.nodes[0].generate(600)
self.sync_all()
self.nodes[1].generate(600)
self.sync_all()
contract_address = self.nodes[0].createcontract("00")['address']
self.nodes[0].generate(1)
self.sync_all()
self.nodes[0].callcontract(contract_address, "00")
self.nodes[1].createcontract("00")
self.nodes[1].generate(1)
time.sleep(1)
assert_equal(self.nodes[0].getblockcount(), self.nodes[1].getblockcount())
assert_equal(self.nodes[0].listcontracts(), self.nodes[1].listcontracts())
if __name__ == '__main__':
QtumCallContractStateNotRevertedTest().main()
|
<commit_before><commit_msg>Add call empty contract python test<commit_after>
|
#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import sys
class QtumCallContractStateNotRevertedTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
self.is_network_split = False
def run_test(self):
connect_nodes_bi(self.nodes, 0, 1)
self.nodes[0].generate(600)
self.sync_all()
self.nodes[1].generate(600)
self.sync_all()
contract_address = self.nodes[0].createcontract("00")['address']
self.nodes[0].generate(1)
self.sync_all()
self.nodes[0].callcontract(contract_address, "00")
self.nodes[1].createcontract("00")
self.nodes[1].generate(1)
time.sleep(1)
assert_equal(self.nodes[0].getblockcount(), self.nodes[1].getblockcount())
assert_equal(self.nodes[0].listcontracts(), self.nodes[1].listcontracts())
if __name__ == '__main__':
QtumCallContractStateNotRevertedTest().main()
|
Add call empty contract python test#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import sys
class QtumCallContractStateNotRevertedTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
self.is_network_split = False
def run_test(self):
connect_nodes_bi(self.nodes, 0, 1)
self.nodes[0].generate(600)
self.sync_all()
self.nodes[1].generate(600)
self.sync_all()
contract_address = self.nodes[0].createcontract("00")['address']
self.nodes[0].generate(1)
self.sync_all()
self.nodes[0].callcontract(contract_address, "00")
self.nodes[1].createcontract("00")
self.nodes[1].generate(1)
time.sleep(1)
assert_equal(self.nodes[0].getblockcount(), self.nodes[1].getblockcount())
assert_equal(self.nodes[0].listcontracts(), self.nodes[1].listcontracts())
if __name__ == '__main__':
QtumCallContractStateNotRevertedTest().main()
|
<commit_before><commit_msg>Add call empty contract python test<commit_after>#!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.script import *
from test_framework.mininode import *
import sys
class QtumCallContractStateNotRevertedTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 2
def setup_network(self, split=False):
self.nodes = start_nodes(self.num_nodes, self.options.tmpdir)
self.is_network_split = False
def run_test(self):
connect_nodes_bi(self.nodes, 0, 1)
self.nodes[0].generate(600)
self.sync_all()
self.nodes[1].generate(600)
self.sync_all()
contract_address = self.nodes[0].createcontract("00")['address']
self.nodes[0].generate(1)
self.sync_all()
self.nodes[0].callcontract(contract_address, "00")
self.nodes[1].createcontract("00")
self.nodes[1].generate(1)
time.sleep(1)
assert_equal(self.nodes[0].getblockcount(), self.nodes[1].getblockcount())
assert_equal(self.nodes[0].listcontracts(), self.nodes[1].listcontracts())
if __name__ == '__main__':
QtumCallContractStateNotRevertedTest().main()
|
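Editor's note: the "00" passed to createcontract and callcontract is raw EVM bytecode, a single 0x00 STOP opcode, i.e. the smallest contract that does nothing. Like other Bitcoin-derived functional tests, the script is normally run directly from qa/rpc-tests once the daemon is built; the tmpdir path below is illustrative.
# ./qtum-call-empty-contract.py --tmpdir=/tmp/qtum-empty-contract-test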
|
ef0d5ba38a8f56cb32a03aa3bacb2fe7e73c4eea
|
alg_tower_of_hanoi.py
|
alg_tower_of_hanoi.py
|
"""The tower of Hanoi."""
from __future__ import print_function
def move_towers(height, from_pole, to_pole, with_pole):
    if height >= 1:  # base case: with no disks there is nothing to move
        move_towers(height - 1, from_pole, with_pole, to_pole)
        move_disk(from_pole, to_pole)
        move_towers(height - 1, with_pole, to_pole, from_pole)
def move_disk(from_pole, to_pole):
print('Moving disk from {0} to {1}'
.format(from_pole, to_pole))
|
Complete alg: tower of hanoi
|
Complete alg: tower of hanoi
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
Complete alg: tower of hanoi
|
"""The tower of Hanoi."""
from __future__ import print_function
def move_towers(height, from_pole, to_pole, with_pole):
    if height >= 1:  # base case: with no disks there is nothing to move
        move_towers(height - 1, from_pole, with_pole, to_pole)
        move_disk(from_pole, to_pole)
        move_towers(height - 1, with_pole, to_pole, from_pole)
def move_disk(from_pole, to_pole):
print('Moving disk from {0} to {1}'
.format(from_pole, to_pole))
|
<commit_before><commit_msg>Complete alg: tower of hanoi<commit_after>
|
"""The tower of Hanoi."""
from __future__ import print_function
def move_towers(height, from_pole, to_pole, with_pole):
    if height >= 1:  # base case: with no disks there is nothing to move
        move_towers(height - 1, from_pole, with_pole, to_pole)
        move_disk(from_pole, to_pole)
        move_towers(height - 1, with_pole, to_pole, from_pole)
def move_disk(from_pole, to_pole):
print('Moving disk from {0} to {1}'
.format(from_pole, to_pole))
|
Complete alg: tower of hanoi"""The tower of Hanoi."""
from __future__ import print_function
def move_towers(height, from_pole, to_pole, with_pole):
    if height >= 1:  # base case: with no disks there is nothing to move
        move_towers(height - 1, from_pole, with_pole, to_pole)
        move_disk(from_pole, to_pole)
        move_towers(height - 1, with_pole, to_pole, from_pole)
def move_disk(from_pole, to_pole):
print('Moving disk from {0} to {1}'
.format(from_pole, to_pole))
|
<commit_before><commit_msg>Complete alg: tower of hanoi<commit_after>"""The tower of Hanoi."""
from __future__ import print_function
def move_towers(height, from_pole, to_pole, with_pole):
    if height >= 1:  # base case: with no disks there is nothing to move
        move_towers(height - 1, from_pole, with_pole, to_pole)
        move_disk(from_pole, to_pole)
        move_towers(height - 1, with_pole, to_pole, from_pole)
def move_disk(from_pole, to_pole):
print('Moving disk from {0} to {1}'
.format(from_pole, to_pole))
|
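Editor's note: a minimal usage sketch for the routine above (the pole labels are illustrative); with the height >= 1 guard in place, moving n disks prints 2**n - 1 moves.
move_towers(3, 'A', 'C', 'B')  # prints the 7 moves for a 3-disk tower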
|
b356cd07fdeb249ff91160a0961b9cc660c7ae01
|
app/ml_models/affect_ai_test.py
|
app/ml_models/affect_ai_test.py
|
import affect_ai
# Test that an affect_AI object gets created correctly
# Test that an affect_AI object can be trained, and builds vocabulary correctly
# Test that an affect_AI object correctly scores samples
# words: foo, bar, baz, goo, car, caz, hoo, dar, daz, ioo, ear, eaz, loo, far, faz; corpora: happiness 1, satisfaction 2, elation 3
|
Add test file for affect_ai class
|
chore: Add test file for affect_ai class
|
Python
|
mit
|
OmegaHorizonResearch/agile-analyst
|
chore: Add test file for affect_ai class
|
import affect_ai
# Test that an affect_AI object gets created correctly
# Test that an affect_AI object can be trained, and builds vocabulary correctly
# Test that an affect_AI object correctly scores samples
# words: foo, bar, baz, goo, car, caz, hoo, dar, daz, ioo, ear, eaz, loo, far, faz; corpora: happiness 1, satisfaction 2, elation 3
|
<commit_before><commit_msg>chore: Add test file for affect_ai class<commit_after>
|
import affect_ai
# Test that an affect_AI object gets created correctly
# Test that an affect_AI object can be trained, and builds vocabulary correctly
# Test that an affect_AI object correctly scores samples
# words: foo, bar, baz, goo, car, caz, hoo, dar, daz, ioo, ear, eaz, loo, far, faz; corpora: happiness 1, satisfaction 2, elation 3
|
chore: Add test file for affect_ai classimport affect_ai
# Test that an affect_AI object gets created correctly
# Test that an affect_AI object can be trained, and builds vocabulary correctly
# Test that an affect_AI object correctly scores samples
# words: foo, bar, baz, goo, car, caz, hoo, dar, daz, ioo, ear, eaz, loo, far, faz; corpora: happiness 1, satisfaction 2, elation 3
|
<commit_before><commit_msg>chore: Add test file for affect_ai class<commit_after>import affect_ai
# Test that an affect_AI object gets created correctly
# Test that an affect_AI object can be trained, and builds vocabulary correctly
# Test that an affect_AI object correctly scores samples
# words: foo, bar, baz, goo, car, caz, hoo, dar, daz, ioo, ear, eaz, loo, far, faz; corpora: happiness 1, satisfaction 2, elation 3
|
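Editor's note: the commit only adds a test plan in comments; below is a pytest-style sketch of the first case with a purely hypothetical constructor, since the real affect_AI API is not visible in this commit.
import affect_ai
def test_affect_ai_creation():
    # Hypothetical API: assumes the module exposes an affect_AI class
    # that can be constructed without arguments.
    ai = affect_ai.affect_AI()
    assert ai is not None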
|
01163ce7fc43b4ab2e5b9ab1c5f94556d0509004
|
examples/tornado/auth_demo.py
|
examples/tornado/auth_demo.py
|
from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main])
|
from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main], settings={'limits.buffer_size': 4 * 1024})
|
Add the settings to the authdemo.
|
Add the settings to the authdemo.
|
Python
|
bsd-3-clause
|
niedbalski/mongrel2,jdesgats/mongrel2,winks/mongrel2,griffordson/mongrel2,cpick/mongrel2,duaneg/mongrel2,jagguli/mongrel2,mongrel2/mongrel2,issuu/mongrel2,metadave/mongrel2,steamraven/mongrel2,cpick/mongrel2,niedbalski/mongrel2,AustinWise/mongrel2,nmandery/mongrel2,markokr/mongrel2,reshefm/mongrel2,sshirokov/mongrel2,fanout/mongrel2,nickdesaulniers/mongrel2,dermoth/mongrel2,bashi-bazouk/mongrel2,ameuret/mongrel2,metadave/mongrel2,markokr/mongrel2,AvdN/mongrel2,elo80ka/mongrel2,nickdesaulniers/mongrel2,wayneeseguin/mongrel2,mongrel2/mongrel2,steamraven/mongrel2,rpeterson/mongrel2,reshefm/mongrel2,ralphbean/mongrel2,winks/mongrel2,AlexVPopov/mongrel2,jiffyjeff/mongrel2,msteinert/mongrel2,AlexVPopov/mongrel2,duaneg/mongrel2,xrl/mongrel2,dermoth/mongrel2,AustinWise/mongrel2,elo80ka/mongrel2,cpick/mongrel2,msteinert/mongrel2,xrl/mongrel2,wayneeseguin/mongrel2,jasom/mongrel2,Gibheer/mongrel2,jubarajborgohain/mongrel2,jiffyjeff/mongrel2,musl/mongrel2,wayneeseguin/mongrel2,jablkopp/mongrel2,jablkopp/mongrel2,xrl/mongrel2,jasom/mongrel2,ralphbean/mongrel2,musl/mongrel2,rpeterson/mongrel2,yoink00/mongrel2,pjkundert/mongrel2,issuu/mongrel2,ameuret/mongrel2,yoink00/mongrel2,Gibheer/mongrel2,jdesgats/mongrel2,chickenkiller/mongrel2,nmandery/mongrel2,steamraven/mongrel2,nmandery/mongrel2,fanout/mongrel2,pjkundert/mongrel2,ralphbean/mongrel2,mbj/mongrel2,jagguli/mongrel2,steamraven/mongrel2,jdesgats/mongrel2,nickdesaulniers/mongrel2,rpeterson/mongrel2,fanout/mongrel2,minrk/mongrel2,elo80ka/mongrel2,aidenkeating/mongrel2,apjanke/mongrel2,apjanke/mongrel2,jdesgats/mongrel2,xrl/mongrel2,elo80ka/mongrel2,metadave/mongrel2,bashi-bazouk/mongrel2,krakensden/mongrel2,reshefm/mongrel2,mbj/mongrel2,jasom/mongrel2,moai/mongrel2,minrk/mongrel2,yoink00/mongrel2,AustinWise/mongrel2,moai/mongrel2,metadave/mongrel2,bashi-bazouk/mongrel2,nickdesaulniers/mongrel2,rpeterson/mongrel2,pjkundert/mongrel2,Gibheer/mongrel2,krakensden/mongrel2,krakensden/mongrel2,jablkopp/mongrel2,jablkopp/mongrel2,AvdN/mongrel2,wayneeseguin/mongrel2,issuu/mongrel2,bashi-bazouk/mongrel2,cpick/mongrel2,pjkundert/mongrel2,sshirokov/mongrel2,jiffyjeff/mongrel2,jasom/mongrel2,pjkundert/mongrel2,cpick/mongrel2,steamraven/mongrel2,minrk/mongrel2,cpick/mongrel2,jiffyjeff/mongrel2,issuu/mongrel2,yoink00/mongrel2,AlexVPopov/mongrel2,xrl/mongrel2,AlexVPopov/mongrel2,Gibheer/mongrel2,aidenkeating/mongrel2,apjanke/mongrel2,markokr/mongrel2,ameuret/mongrel2,mbj/mongrel2,griffordson/mongrel2,AvdN/mongrel2,AvdN/mongrel2,nmandery/mongrel2,ameuret/mongrel2,aidenkeating/mongrel2,chickenkiller/mongrel2,chickenkiller/mongrel2,fanout/mongrel2,jubarajborgohain/mongrel2,duaneg/mongrel2,winks/mongrel2,dermoth/mongrel2,moai/mongrel2,sshirokov/mongrel2,mongrel2/mongrel2,elo80ka/mongrel2,griffordson/mongrel2,minrk/mongrel2,niedbalski/mongrel2,mongrel2/mongrel2,aidenkeating/mongrel2,yoink00/mongrel2,yoink00/mongrel2,duaneg/mongrel2,winks/mongrel2,mongrel2/mongrel2,msteinert/mongrel2,niedbalski/mongrel2,jiffyjeff/mongrel2,chickenkiller/mongrel2,issuu/mongrel2,chickenkiller/mongrel2,reshefm/mongrel2,chickenkiller/mongrel2,AustinWise/mongrel2
|
from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main])
Add the settings to the authdemo.
|
from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main], settings={'limits.buffer_size': 4 * 1024})
|
<commit_before>from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main])
<commit_msg>Add the settings to the authdemo.<commit_after>
|
from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main], settings={'limits.buffer_size': 4 * 1024})
|
from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main])
Add the settings to the authdemo.from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main], settings={'limits.buffer_size': 4 * 1024})
|
<commit_before>from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main])
<commit_msg>Add the settings to the authdemo.<commit_after>from mongrel2.config import *
main = Server(
uuid="f400bf85-4538-4f7a-8908-67e313d515c2",
access_log="/logs/access.log",
error_log="/logs/error.log",
chroot="./",
default_host="localhost",
name="test",
pid_file="/run/mongrel2.pid",
port=6767,
hosts = [
Host(name="localhost",
routes={ r'/(.*)': Proxy(addr='127.0.0.1', port=8888) })
]
)
commit([main], settings={'limits.buffer_size': 4 * 1024})
|
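Editor's note: the only functional change in this commit is the settings keyword; 'limits.buffer_size': 4 * 1024 pins the buffer to 4096 bytes, and commit() persists such key/value pairs into the mongrel2 configuration database alongside the server definition.
commit([main], settings={'limits.buffer_size': 4096})  # same value, spelled out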
3bd1692772dd40d6321280645491d00d4f53b1ac
|
python2.7-src/isomorphic_strings.py
|
python2.7-src/isomorphic_strings.py
|
#!/usr/bin/python
def check_isomorphic(s1, s2) :
d = dict()
if len(s1) != len(s2) :
return False
for i in range(0, len(s1)) :
if s1[i] in d :
if d[s1[i]] != s2[i] :
return False
else :
if s2[i] in d.values() :
return False
d[s1[i]] = s2[i]
return True
s1 = raw_input("Enter first string : ")
s2 = raw_input("Enter second string : ")
is_isomorphic = check_isomorphic(s1, s2)
if(is_isomorphic) :
print "The srings are isomorphic"
else :
print "The strings are NOT isomorphic"
|
Check if two strings are isomorphic
|
Check if two strings are isomorphic
|
Python
|
mit
|
diptin/dipti-coding-samples,diptin/dipti-coding-samples
|
Check if two strings are isomorphic
|
#!/usr/bin/python
def check_isomorphic(s1, s2) :
d = dict()
if len(s1) != len(s2) :
return False
for i in range(0, len(s1)) :
if s1[i] in d :
if d[s1[i]] != s2[i] :
return False
else :
if s2[i] in d.values() :
return False
d[s1[i]] = s2[i]
return True
s1 = raw_input("Enter first string : ")
s2 = raw_input("Enter second string : ")
is_isomorphic = check_isomorphic(s1, s2)
if(is_isomorphic) :
print "The srings are isomorphic"
else :
print "The strings are NOT isomorphic"
|
<commit_before><commit_msg>Check if two strings are isomorphic<commit_after>
|
#!/usr/bin/python
def check_isomorphic(s1, s2) :
d = dict()
if len(s1) != len(s2) :
return False
for i in range(0, len(s1)) :
if s1[i] in d :
if d[s1[i]] != s2[i] :
return False
else :
if s2[i] in d.values() :
return False
d[s1[i]] = s2[i]
return True
s1 = raw_input("Enter first string : ")
s2 = raw_input("Enter second string : ")
is_isomorphic = check_isomorphic(s1, s2)
if(is_isomorphic) :
print "The srings are isomorphic"
else :
print "The strings are NOT isomorphic"
|
Check if two strings are isomorphic#!/usr/bin/python
def check_isomorphic(s1, s2) :
d = dict()
if len(s1) != len(s2) :
return False
for i in range(0, len(s1)) :
if s1[i] in d :
if d[s1[i]] != s2[i] :
return False
else :
if s2[i] in d.values() :
return False
d[s1[i]] = s2[i]
return True
s1 = raw_input("Enter first string : ")
s2 = raw_input("Enter second string : ")
is_isomorphic = check_isomorphic(s1, s2)
if(is_isomorphic) :
print "The srings are isomorphic"
else :
print "The strings are NOT isomorphic"
|
<commit_before><commit_msg>Check if two strings are isomorphic<commit_after>#!/usr/bin/python
def check_isomorphic(s1, s2) :
d = dict()
if len(s1) != len(s2) :
return False
for i in range(0, len(s1)) :
if s1[i] in d :
if d[s1[i]] != s2[i] :
return False
else :
if s2[i] in d.values() :
return False
d[s1[i]] = s2[i]
return True
s1 = raw_input("Enter first string : ")
s2 = raw_input("Enter second string : ")
is_isomorphic = check_isomorphic(s1, s2)
if(is_isomorphic) :
print "The srings are isomorphic"
else :
print "The strings are NOT isomorphic"
|
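Editor's note: a few quick sanity checks for check_isomorphic above (inputs chosen for illustration; Python 2 print to match the script).
print check_isomorphic("egg", "add")  # True: e->a, g->d is a consistent one-to-one mapping
print check_isomorphic("ab", "aa")    # False: the d.values() check rejects mapping a and b to the same letter
print check_isomorphic("foo", "bar")  # False: o cannot map to both a and r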
|
d0a06aa69bd067c52964ed374c1238fe4f1e9c3f
|
server/forms/db_match.py
|
server/forms/db_match.py
|
from wtforms.validators import ValidationError
def db_match(model, message, *args, **kwargs):
fields = kwargs.pop('match_fields', None)
result = model.query.filter_by(*args, **kwargs).first()
if result is None:
raise ValidationError(message)
if fields is not None:
for key in fields:
if (not hasattr(result, key) or
                getattr(result, key) != fields[key]):
raise ValidationError(message)
|
Add custom validator for matching db fields
|
Add custom validator for matching db fields
|
Python
|
mit
|
ganemone/ontheside,ganemone/ontheside,ganemone/ontheside
|
Add custom validator for matching db fields
|
from wtforms.validators import ValidationError
def db_match(model, message, *args, **kwargs):
fields = kwargs.pop('match_fields', None)
result = model.query.filter_by(*args, **kwargs).first()
if result is None:
raise ValidationError(message)
if fields is not None:
for key in fields:
if (not hasattr(result, key) or
                getattr(result, key) != fields[key]):
raise ValidationError(message)
|
<commit_before><commit_msg>Add custom validator for matching db fields<commit_after>
|
from wtforms.validators import ValidationError
def db_match(model, message, *args, **kwargs):
fields = kwargs.pop('match_fields', None)
result = model.query.filter_by(*args, **kwargs).first()
if result is None:
raise ValidationError(message)
if fields is not None:
for key in fields:
if (not hasattr(result, key) or
                getattr(result, key) != fields[key]):
raise ValidationError(message)
|
Add custom validator for matching db fieldsfrom wtforms.validators import ValidationError
def db_match(model, message, *args, **kwargs):
fields = kwargs.pop('match_fields', None)
result = model.query.filter_by(*args, **kwargs).first()
if result is None:
raise ValidationError(message)
if fields is not None:
for key in fields:
if (not hasattr(result, key) or
                getattr(result, key) != fields[key]):
raise ValidationError(message)
|
<commit_before><commit_msg>Add custom validator for matching db fields<commit_after>from wtforms.validators import ValidationError
def db_match(model, message, *args, **kwargs):
fields = kwargs.pop('match_fields', None)
result = model.query.filter_by(*args, **kwargs).first()
if result is None:
raise ValidationError(message)
if fields is not None:
for key in fields:
if (not hasattr(result, key) or
                getattr(result, key) != fields[key]):
raise ValidationError(message)
|
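Editor's note: a hedged usage sketch for the validator helper above; the User model, field names, and error message are illustrative stand-ins, not part of the original commit.
from wtforms import Form, StringField
class LoginForm(Form):
    username = StringField('username')
    def validate_username(self, form_field):
        # Passes only if a User row matches the submitted name and its
        # is_active attribute equals True; otherwise ValidationError is raised.
        db_match(User, 'Unknown or inactive user',
                 username=form_field.data, match_fields={'is_active': True})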
|
35c4e42965ee8b4cc6735d03080b0ca69e7974d9
|
ddsc/generatedocs.py
|
ddsc/generatedocs.py
|
# Script to generate contents of https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands
from ddsc.ddsclient import DDSClient
from ddsc.versioncheck import get_internal_version_str
from argparse import SUPPRESS
import sys
# Fix argparse to have ddsclient instead of generatedocs.py as the command
sys.argv[0] = 'ddsclient'
DO_NOT_EDIT_WARNING = """
<!-- !!!NOTE!!!
This file is generated by running `python ddsc/generatedocs.py`.
DO NOT MANUALLY EDIT.
!!!NOTE!!! -->
"""
def create_parser():
return DDSClient()._create_parser().parser
def extract_help(cmd, action):
description = action.description
usage_help = action.format_usage()
action.description = SUPPRESS
action.usage = SUPPRESS
return cmd, description, action.format_help(), usage_help
def get_command_help(parser):
command_help = []
for group in parser._subparsers._group_actions:
for cmd, action in group.choices.items():
command_help.append(extract_help(cmd, action))
return command_help
def main():
parser = create_parser()
print(DO_NOT_EDIT_WARNING)
version_str = get_internal_version_str()
print(f"# DukeDSClient\nVersion {version_str}\n\n")
print(f"## Commands\n")
for cmd, description, help_text, usage_text in get_command_help(parser):
print(f"### {cmd}\n{description}\n")
print(f"```\n{usage_text}\n{help_text}\n```\n\n")
if __name__ == '__main__':
main()
|
Add script to generate wiki command documentation
|
Add script to generate wiki command documentation
Adds ddsc/generatedocs.py to generate the wiki command list.
When command line argument changes are made this script will
generate markdown that should be added to the wiki on the
[All-Commands](https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands)
page.
|
Python
|
mit
|
Duke-GCB/DukeDSClient,Duke-GCB/DukeDSClient
|
Add script to generate wiki command documentation
Adds ddsc/generatedocs.py to generate the wiki command list.
When command line argument changes are made this script will
generate markdown that should be added to the wiki on the
[All-Commands](https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands)
page.
|
# Script to generate contents of https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands
from ddsc.ddsclient import DDSClient
from ddsc.versioncheck import get_internal_version_str
from argparse import SUPPRESS
import sys
# Fix argparse to have ddsclient instead of generatedocs.py as the command
sys.argv[0] = 'ddsclient'
DO_NOT_EDIT_WARNING = """
<!-- !!!NOTE!!!
This file is generated by running `python ddsc/generatedocs.py`.
DO NOT MANUALLY EDIT.
!!!NOTE!!! -->
"""
def create_parser():
return DDSClient()._create_parser().parser
def extract_help(cmd, action):
description = action.description
usage_help = action.format_usage()
action.description = SUPPRESS
action.usage = SUPPRESS
return cmd, description, action.format_help(), usage_help
def get_command_help(parser):
command_help = []
for group in parser._subparsers._group_actions:
for cmd, action in group.choices.items():
command_help.append(extract_help(cmd, action))
return command_help
def main():
parser = create_parser()
print(DO_NOT_EDIT_WARNING)
version_str = get_internal_version_str()
print(f"# DukeDSClient\nVersion {version_str}\n\n")
print(f"## Commands\n")
for cmd, description, help_text, usage_text in get_command_help(parser):
print(f"### {cmd}\n{description}\n")
print(f"```\n{usage_text}\n{help_text}\n```\n\n")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to generate wiki command documentation
Adds ddsc/generatedocs.py to generate the wiki command list.
When command line argument changes are made this script will
generate markdown that should be added to the wiki on the
[All-Commands](https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands)
page.<commit_after>
|
# Script to generate contents of https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands
from ddsc.ddsclient import DDSClient
from ddsc.versioncheck import get_internal_version_str
from argparse import SUPPRESS
import sys
# Fix argparse to have ddsclient instead of generatedocs.py as the command
sys.argv[0] = 'ddsclient'
DO_NOT_EDIT_WARNING = """
<!-- !!!NOTE!!!
This file is generated by running `python ddsc/generatedocs.py`.
DO NOT MANUALLY EDIT.
!!!NOTE!!! -->
"""
def create_parser():
return DDSClient()._create_parser().parser
def extract_help(cmd, action):
description = action.description
usage_help = action.format_usage()
action.description = SUPPRESS
action.usage = SUPPRESS
return cmd, description, action.format_help(), usage_help
def get_command_help(parser):
command_help = []
for group in parser._subparsers._group_actions:
for cmd, action in group.choices.items():
command_help.append(extract_help(cmd, action))
return command_help
def main():
parser = create_parser()
print(DO_NOT_EDIT_WARNING)
version_str = get_internal_version_str()
print(f"# DukeDSClient\nVersion {version_str}\n\n")
print(f"## Commands\n")
for cmd, description, help_text, usage_text in get_command_help(parser):
print(f"### {cmd}\n{description}\n")
print(f"```\n{usage_text}\n{help_text}\n```\n\n")
if __name__ == '__main__':
main()
|
Add script to generate wiki command documentation
Adds ddsc/generatedocs.py to generate the wiki command list.
When command line argument changes are made this script will
generate markdown that should be added to the wiki on the
[All-Commands](https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands)
page.# Script to generate contents of https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands
from ddsc.ddsclient import DDSClient
from ddsc.versioncheck import get_internal_version_str
from argparse import SUPPRESS
import sys
# Fix argparse to have ddsclient instead of generatedocs.py as the command
sys.argv[0] = 'ddsclient'
DO_NOT_EDIT_WARNING = """
<!-- !!!NOTE!!!
This file is generated by running `python ddsc/generatedocs.py`.
DO NOT MANUALLY EDIT.
!!!NOTE!!! -->
"""
def create_parser():
return DDSClient()._create_parser().parser
def extract_help(cmd, action):
description = action.description
usage_help = action.format_usage()
action.description = SUPPRESS
action.usage = SUPPRESS
return cmd, description, action.format_help(), usage_help
def get_command_help(parser):
command_help = []
for group in parser._subparsers._group_actions:
for cmd, action in group.choices.items():
command_help.append(extract_help(cmd, action))
return command_help
def main():
parser = create_parser()
print(DO_NOT_EDIT_WARNING)
version_str = get_internal_version_str()
print(f"# DukeDSClient\nVersion {version_str}\n\n")
print(f"## Commands\n")
for cmd, description, help_text, usage_text in get_command_help(parser):
print(f"### {cmd}\n{description}\n")
print(f"```\n{usage_text}\n{help_text}\n```\n\n")
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script to generate wiki command documentation
Adds ddsc/generatedocs.py to generate the wiki command list.
When command line argument changes are made this script will
generate markdown that should be added to the wiki on the
[All-Commands](https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands)
page.<commit_after># Script to generate contents of https://github.com/Duke-GCB/DukeDSClient/wiki/All-Commands
from ddsc.ddsclient import DDSClient
from ddsc.versioncheck import get_internal_version_str
from argparse import SUPPRESS
import sys
# Fix argparse to have ddsclient instead of generatedocs.py as the command
sys.argv[0] = 'ddsclient'
DO_NOT_EDIT_WARNING = """
<!-- !!!NOTE!!!
This file is generated by running `python ddsc/generatedocs.py`.
DO NOT MANUALLY EDIT.
!!!NOTE!!! -->
"""
def create_parser():
return DDSClient()._create_parser().parser
def extract_help(cmd, action):
description = action.description
usage_help = action.format_usage()
action.description = SUPPRESS
action.usage = SUPPRESS
return cmd, description, action.format_help(), usage_help
def get_command_help(parser):
command_help = []
for group in parser._subparsers._group_actions:
for cmd, action in group.choices.items():
command_help.append(extract_help(cmd, action))
return command_help
def main():
parser = create_parser()
print(DO_NOT_EDIT_WARNING)
version_str = get_internal_version_str()
print(f"# DukeDSClient\nVersion {version_str}\n\n")
print(f"## Commands\n")
for cmd, description, help_text, usage_text in get_command_help(parser):
print(f"### {cmd}\n{description}\n")
print(f"```\n{usage_text}\n{help_text}\n```\n\n")
if __name__ == '__main__':
main()
|
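Editor's note: per the commit message the script writes markdown to stdout, so refreshing the wiki page is a simple redirect; the output file name below is illustrative.
# python ddsc/generatedocs.py > All-Commands.md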
|
dc8a01c20b3844c78dd6ffdc9e411ffe9e853b2b
|
server/ntb/macros/nb_NO_to_nn_NO_metadata_macro.py
|
server/ntb/macros/nb_NO_to_nn_NO_metadata_macro.py
|
"""
nb-NO to nn-NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def nb_NO_to_nn_NO_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'Bokmal to Nynorsk Metadata Macro'
label = 'Translate to Nynorsk Macro'
callback = nb_NO_to_nn_NO_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['nb-NO']
to_languages = ['nn-NO']
|
Add nb-NO to nn-NO macro (Created by Karel)
|
feat(translation): Add nb-NO to nn-NO macro (Created by Karel)
|
Python
|
agpl-3.0
|
petrjasek/superdesk-ntb,superdesk/superdesk-ntb,superdesk/superdesk-ntb,superdesk/superdesk-ntb,ioanpocol/superdesk-ntb,petrjasek/superdesk-ntb,ioanpocol/superdesk-ntb,ioanpocol/superdesk-ntb,petrjasek/superdesk-ntb,petrjasek/superdesk-ntb,superdesk/superdesk-ntb
|
feat(translation): Add nb-NO to nn-NO macro (Created by Karel)
|
"""
nb-NO to nn-NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def nb_NO_to_nn_NO_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'Bokmal to Nynorsk Metadata Macro'
label = 'Translate to Nynorsk Macro'
callback = nb_NO_to_nn_NO_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['nb-NO']
to_languages = ['nn-NO']
|
<commit_before><commit_msg>feat(translation): Add nb-NO to nn-NO macro (Created by Karel)<commit_after>
|
"""
nb-NO to nn-NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def nb_NO_to_nn_NO_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'Bokmal to Nynorsk Metadata Macro'
label = 'Translate to Nynorsk Macro'
callback = nb_NO_to_nn_NO_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['nb-NO']
to_languages = ['nn-NO']
|
feat(translation): Add nb-NO to nn-NO macro (Created by Karel)"""
nb-NO to nn-NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def nb_NO_to_nn_NO_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'Bokmal to Nynorsk Metadata Macro'
label = 'Translate to Nynorsk Macro'
callback = nb_NO_to_nn_NO_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['nb-NO']
to_languages = ['nn-NO']
|
<commit_before><commit_msg>feat(translation): Add nb-NO to nn-NO macro (Created by Karel)<commit_after>"""
nb-NO to nn-NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def nb_NO_to_nn_NO_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'Bokmal to Nynorsk Metadata Macro'
label = 'Translate to Nynorsk Macro'
callback = nb_NO_to_nn_NO_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['nb-NO']
to_languages = ['nn-NO']
|
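Editor's note: a minimal illustration of the macro applied to an item dict; the headline key is an illustrative extra field showing that unrelated metadata passes through untouched.
item = {'headline': 'Example', 'byline': 'Someone', 'language': 'nb-NO'}
item = nb_NO_to_nn_NO_metadata_macro(item)
assert item['byline'] == '(NPK-NTB)'
assert item['body_footer'] == '(©NPK)'
assert item['language'] == 'nn-NO'
assert item['anpa_category'][0]['name'] == 'NPKSisteNytt'
assert item['headline'] == 'Example'  # untouched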
|
32a23e1b4122976685fa38f3937f28f543642d77
|
services/content_metrics.py
|
services/content_metrics.py
|
"""This module contains utility functions to print reports of the
database contents.
The need is to find examples of extreme or pathological content
with either long field contents or a large amount of related
objects.
"""
from django.db.models.functions import Length
from django.db.models import Count, Sum, Case, When, IntegerField
from services.models import Unit
ESERVICE_LINK_SECTION_TYPE = 9
def unit_description_longest(limit=10):
qs = Unit.objects.filter(description__isnull=False).annotate(
text_len=Length('description')
).order_by('-text_len')[:limit]
return [(u, u.text_len) for u in qs]
def unit_most_services(limit=10):
qs = Unit.objects.annotate(num_services=Count('services')).order_by(
'-num_services')[:limit]
return [(u, u.num_services) for u in qs]
def unit_most_eservice_connections(limit=10):
# https://stackoverflow.com/questions/30752268/how-to-filter-objects-for-count-annotation-in-django
units = Unit.objects.filter(connections__section_type=ESERVICE_LINK_SECTION_TYPE).annotate(
eservice_links=Sum(
Case(
When(connections__section_type=ESERVICE_LINK_SECTION_TYPE, then=1),
default=0, output_field=IntegerField()))).order_by('-eservice_links')[:limit]
return [(u, u.eservice_links) for u in units]
def unit_most_services_without_periods(limit=10):
units = Unit.objects.filter(services__period_enabled=False).annotate(
num_services=Sum(
Case(
When(services__period_enabled=False, then=1),
default=0, output_field=IntegerField()))).order_by('-num_services')[:limit]
return [(u, u.num_services) for u in units]
def unit_ui_url(unit):
return "https://palvelukartta.hel.fi/unit/{}".format(unit.id)
def format_unit(unit):
return "Name: {}\n id: {}\n URL: {}\n".format(
unit.name_fi, unit.id, unit_ui_url(unit))
def print_units(units):
for u, value in units:
print(format_unit(u), 'measured value:', value)
print()
|
Add saved database queries for exploring database contents
|
Add saved database queries for exploring database contents
|
Python
|
agpl-3.0
|
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
|
Add saved database queries for exploring database contents
|
"""This module contains utility functions to print reports of the
database contents.
The need is to find examples of extreme or pathological content
with either long field contents or a large amount of related
objects.
"""
from django.db.models.functions import Length
from django.db.models import Count, Sum, Case, When, IntegerField
from services.models import Unit
ESERVICE_LINK_SECTION_TYPE = 9
def unit_description_longest(limit=10):
qs = Unit.objects.filter(description__isnull=False).annotate(
text_len=Length('description')
).order_by('-text_len')[:limit]
return [(u, u.text_len) for u in qs]
def unit_most_services(limit=10):
qs = Unit.objects.annotate(num_services=Count('services')).order_by(
'-num_services')[:limit]
return [(u, u.num_services) for u in qs]
def unit_most_eservice_connections(limit=10):
# https://stackoverflow.com/questions/30752268/how-to-filter-objects-for-count-annotation-in-django
units = Unit.objects.filter(connections__section_type=ESERVICE_LINK_SECTION_TYPE).annotate(
eservice_links=Sum(
Case(
When(connections__section_type=ESERVICE_LINK_SECTION_TYPE, then=1),
default=0, output_field=IntegerField()))).order_by('-eservice_links')[:limit]
return [(u, u.eservice_links) for u in units]
def unit_most_services_without_periods(limit=10):
units = Unit.objects.filter(services__period_enabled=False).annotate(
num_services=Sum(
Case(
When(services__period_enabled=False, then=1),
default=0, output_field=IntegerField()))).order_by('-num_services')[:limit]
return [(u, u.num_services) for u in units]
def unit_ui_url(unit):
return "https://palvelukartta.hel.fi/unit/{}".format(unit.id)
def format_unit(unit):
return "Name: {}\n id: {}\n URL: {}\n".format(
unit.name_fi, unit.id, unit_ui_url(unit))
def print_units(units):
for u, value in units:
print(format_unit(u), 'measured value:', value)
print()
|
<commit_before><commit_msg>Add saved database queries for exploring database contents<commit_after>
|
"""This module contains utility functions to print reports of the
database contents.
The need is to find examples of extreme or pathological content
with either long field contents or a large amount of related
objects.
"""
from django.db.models.functions import Length
from django.db.models import Count, Sum, Case, When, IntegerField
from services.models import Unit
ESERVICE_LINK_SECTION_TYPE = 9
def unit_description_longest(limit=10):
qs = Unit.objects.filter(description__isnull=False).annotate(
text_len=Length('description')
).order_by('-text_len')[:limit]
return [(u, u.text_len) for u in qs]
def unit_most_services(limit=10):
qs = Unit.objects.annotate(num_services=Count('services')).order_by(
'-num_services')[:limit]
return [(u, u.num_services) for u in qs]
def unit_most_eservice_connections(limit=10):
# https://stackoverflow.com/questions/30752268/how-to-filter-objects-for-count-annotation-in-django
units = Unit.objects.filter(connections__section_type=ESERVICE_LINK_SECTION_TYPE).annotate(
eservice_links=Sum(
Case(
When(connections__section_type=ESERVICE_LINK_SECTION_TYPE, then=1),
default=0, output_field=IntegerField()))).order_by('-eservice_links')[:limit]
return [(u, u.eservice_links) for u in units]
def unit_most_services_without_periods(limit=10):
units = Unit.objects.filter(services__period_enabled=False).annotate(
num_services=Sum(
Case(
When(services__period_enabled=False, then=1),
default=0, output_field=IntegerField()))).order_by('-num_services')[:limit]
return [(u, u.num_services) for u in units]
def unit_ui_url(unit):
return "https://palvelukartta.hel.fi/unit/{}".format(unit.id)
def format_unit(unit):
return "Name: {}\n id: {}\n URL: {}\n".format(
unit.name_fi, unit.id, unit_ui_url(unit))
def print_units(units):
for u, value in units:
print(format_unit(u), 'measured value:', value)
print()
|
Add saved database queries for exploring database contents"""This module contains utility functions to print reports of the
database contents.
The need is to find examples of extreme or pathological content
with either long field contents or a large amount of related
objects.
"""
from django.db.models.functions import Length
from django.db.models import Count, Sum, Case, When, IntegerField
from services.models import Unit
ESERVICE_LINK_SECTION_TYPE = 9
def unit_description_longest(limit=10):
qs = Unit.objects.filter(description__isnull=False).annotate(
text_len=Length('description')
).order_by('-text_len')[:limit]
return [(u, u.text_len) for u in qs]
def unit_most_services(limit=10):
qs = Unit.objects.annotate(num_services=Count('services')).order_by(
'-num_services')[:limit]
return [(u, u.num_services) for u in qs]
def unit_most_eservice_connections(limit=10):
# https://stackoverflow.com/questions/30752268/how-to-filter-objects-for-count-annotation-in-django
units = Unit.objects.filter(connections__section_type=ESERVICE_LINK_SECTION_TYPE).annotate(
eservice_links=Sum(
Case(
When(connections__section_type=ESERVICE_LINK_SECTION_TYPE, then=1),
default=0, output_field=IntegerField()))).order_by('-eservice_links')[:limit]
return [(u, u.eservice_links) for u in units]
def unit_most_services_without_periods(limit=10):
units = Unit.objects.filter(services__period_enabled=False).annotate(
num_services=Sum(
Case(
When(services__period_enabled=False, then=1),
default=0, output_field=IntegerField()))).order_by('-num_services')[:limit]
return [(u, u.num_services) for u in units]
def unit_ui_url(unit):
return "https://palvelukartta.hel.fi/unit/{}".format(unit.id)
def format_unit(unit):
return "Name: {}\n id: {}\n URL: {}\n".format(
unit.name_fi, unit.id, unit_ui_url(unit))
def print_units(units):
for u, value in units:
print(format_unit(u), 'measured value:', value)
print()
|
<commit_before><commit_msg>Add saved database queries for exploring database contents<commit_after>"""This module contains utility functions to print reports of the
database contents.
The need is to find examples of extreme or pathological content
with either long field contents or a large amount of related
objects.
"""
from django.db.models.functions import Length
from django.db.models import Count, Sum, Case, When, IntegerField
from services.models import Unit
ESERVICE_LINK_SECTION_TYPE = 9
def unit_description_longest(limit=10):
qs = Unit.objects.filter(description__isnull=False).annotate(
text_len=Length('description')
).order_by('-text_len')[:limit]
return [(u, u.text_len) for u in qs]
def unit_most_services(limit=10):
qs = Unit.objects.annotate(num_services=Count('services')).order_by(
'-num_services')[:limit]
return [(u, u.num_services) for u in qs]
def unit_most_eservice_connections(limit=10):
# https://stackoverflow.com/questions/30752268/how-to-filter-objects-for-count-annotation-in-django
units = Unit.objects.filter(connections__section_type=ESERVICE_LINK_SECTION_TYPE).annotate(
eservice_links=Sum(
Case(
When(connections__section_type=ESERVICE_LINK_SECTION_TYPE, then=1),
default=0, output_field=IntegerField()))).order_by('-eservice_links')[:limit]
return [(u, u.eservice_links) for u in units]
def unit_most_services_without_periods(limit=10):
units = Unit.objects.filter(services__period_enabled=False).annotate(
num_services=Sum(
Case(
When(services__period_enabled=False, then=1),
default=0, output_field=IntegerField()))).order_by('-num_services')[:limit]
return [(u, u.num_services) for u in units]
def unit_ui_url(unit):
return "https://palvelukartta.hel.fi/unit/{}".format(unit.id)
def format_unit(unit):
return "Name: {}\n id: {}\n URL: {}\n".format(
unit.name_fi, unit.id, unit_ui_url(unit))
def print_units(units):
for u, value in units:
print(format_unit(u), 'measured value:', value)
print()
|
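Editor's note: these helpers print their reports, so the natural way to use them is from a Django shell session; the sketch below assumes the module lives at services/content_metrics.py as committed.
# python manage.py shell
from services import content_metrics as cm
cm.print_units(cm.unit_description_longest(limit=5))
cm.print_units(cm.unit_most_eservice_connections(limit=5))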
|
ee80300bf284972b111e71bf88422e31aaa2e565
|
tests/test_pricing_integration.py
|
tests/test_pricing_integration.py
|
import time
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
class TestAddproduct(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.get("http://localhost:9761/pricing_quote")
time.sleep(1)
def tearDown(self):
self.driver.quit()
def test_addproduct(self):
self.driver.set_window_size(1920, 1080)
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(2) .add-to-quote .glyphicon").click()
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote .glyphicon").click()
# Fetch the ref_id of the product added
e = self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote")
id = e.get_attribute('data-product-id')
e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
self.assertEqual(e.get_attribute('value'), '1', msg="One item of this product is added")
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 2, msg="Two products added to the quote")
e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
        self.assertEqual(e.get_attribute('value'), '1', msg="Still one item of this product before it is added again")
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 2, msg="Two products (2+1) added to the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(2) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 1, msg="Removing one product should leave just one in the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(1) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 0, msg="Removing the last product should leave no product in the quote")
|
Test for adding products to the quote list
|
Test for adding products to the quote list
|
Python
|
mit
|
remiolsen/status,remiolsen/status,SciLifeLab/genomics-status,SciLifeLab/genomics-status,remiolsen/status,SciLifeLab/genomics-status
|
Test for adding products to the quote list
|
import time
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
class TestAddproduct(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.get("http://localhost:9761/pricing_quote")
time.sleep(1)
def tearDown(self):
self.driver.quit()
def test_addproduct(self):
self.driver.set_window_size(1920, 1080)
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(2) .add-to-quote .glyphicon").click()
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote .glyphicon").click()
# Fetch the ref_id of the product added
e = self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote")
id = e.get_attribute('data-product-id')
e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
self.assertEqual(e.get_attribute('value'), '1', msg="One item of this product is added")
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 2, msg="Two products added to the quote")
        self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .glyphicon").click()
        e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
        self.assertEqual(e.get_attribute('value'), '2', msg="Two items of this product are added")
        elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
        self.assertEqual(len(elements), 2, msg="Two products (2+1) added to the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(2) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 1, msg="Removing one product should leave just one in the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(1) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 0, msg="Removing the last product should leave no product in the quote")
|
<commit_before><commit_msg>Test for adding products to the quote list<commit_after>
|
import time
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
class TestAddproduct(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.get("http://localhost:9761/pricing_quote")
time.sleep(1)
def tearDown(self):
self.driver.quit()
def test_addproduct(self):
self.driver.set_window_size(1920, 1080)
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(2) .add-to-quote .glyphicon").click()
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote .glyphicon").click()
# Fetch the ref_id of the product added
e = self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote")
id = e.get_attribute('data-product-id')
e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
self.assertEqual(e.get_attribute('value'), '1', msg="One item of this product is added")
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 2, msg="Two products added to the quote")
        self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .glyphicon").click()
        e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
        self.assertEqual(e.get_attribute('value'), '2', msg="Two items of this product are added")
        elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
        self.assertEqual(len(elements), 2, msg="Two products (2+1) added to the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(2) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 1, msg="Removing one product should leave just one in the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(1) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 0, msg="Removing the last product should leave no product in the quote")
|
Test for adding products to the quote listimport time
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
class TestAddproduct(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.get("http://localhost:9761/pricing_quote")
time.sleep(1)
def tearDown(self):
self.driver.quit()
def test_addproduct(self):
self.driver.set_window_size(1920, 1080)
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(2) .add-to-quote .glyphicon").click()
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote .glyphicon").click()
# Fetch the ref_id of the product added
e = self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote")
id = e.get_attribute('data-product-id')
e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
self.assertEqual(e.get_attribute('value'), '1', msg="One item of this product is added")
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 2, msg="Two products added to the quote")
        self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .glyphicon").click()
        e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
        self.assertEqual(e.get_attribute('value'), '2', msg="Two items of this product are added")
        elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
        self.assertEqual(len(elements), 2, msg="Two products (2+1) added to the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(2) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 1, msg="Removing one product should leave just one in the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(1) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 0, msg="Removing the last product should leave no product in the quote")
|
<commit_before><commit_msg>Test for adding products to the quote list<commit_after>import time
import unittest
from selenium import webdriver
from selenium.webdriver.common.by import By
class TestAddproduct(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Chrome()
self.driver.get("http://localhost:9761/pricing_quote")
time.sleep(1)
def tearDown(self):
self.driver.quit()
def test_addproduct(self):
self.driver.set_window_size(1920, 1080)
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(2) .add-to-quote .glyphicon").click()
self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote .glyphicon").click()
# Fetch the ref_id of the product added
e = self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .add-to-quote")
id = e.get_attribute('data-product-id')
e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
self.assertEqual(e.get_attribute('value'), '1', msg="One item of this product is added")
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 2, msg="Two products added to the quote")
        self.driver.find_element(By.CSS_SELECTOR, ".status_available:nth-child(5) .glyphicon").click()
        e = self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list input[data-product-id='{}']".format(id))
        self.assertEqual(e.get_attribute('value'), '2', msg="Two items of this product are added")
        elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
        self.assertEqual(len(elements), 2, msg="Two products (2+1) added to the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(2) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 1, msg="Removing one product should leave just one in the quote")
self.driver.find_element(By.CSS_SELECTOR, ".quote-product-list li:nth-child(1) .glyphicon").click()
elements = self.driver.find_elements(By.CSS_SELECTOR, ".quote-product-list li > .quote_product_name")
self.assertEqual(len(elements), 0, msg="Removing the last product should leave no product in the quote")
|
|
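One fragility in the test above: the fixed time.sleep(1) in setUp races against page load and is a common source of flaky runs. A hedged alternative, assuming the quote page renders its add-to-quote controls on load, is an explicit wait via the standard Selenium waiting API (not part of the original test):

from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait

def wait_for_quote_page(driver, timeout=10):
    # Block until the first add-to-quote control is clickable instead of
    # sleeping a fixed second; the timeout value is an assumption.
    WebDriverWait(driver, timeout).until(
        EC.element_to_be_clickable((By.CSS_SELECTOR, ".add-to-quote")))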
a9d6067704b23170e5c623790410674b13968fc1
|
froide/publicbody/migrations/0004_auto_20161130_0128.py
|
froide/publicbody/migrations/0004_auto_20161130_0128.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-30 00:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0003_auto_20160123_1336'),
]
operations = [
migrations.AlterField(
model_name='publicbody',
name='_created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_creators', to=settings.AUTH_USER_MODEL, verbose_name='Created by'),
),
migrations.AlterField(
model_name='publicbody',
name='_updated_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_updaters', to=settings.AUTH_USER_MODEL, verbose_name='Updated by'),
),
]
|
Add publicbody migration for on_delete
|
Add publicbody migration for on_delete
|
Python
|
mit
|
fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide
|
Add publicbody migration for on_delete
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-30 00:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0003_auto_20160123_1336'),
]
operations = [
migrations.AlterField(
model_name='publicbody',
name='_created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_creators', to=settings.AUTH_USER_MODEL, verbose_name='Created by'),
),
migrations.AlterField(
model_name='publicbody',
name='_updated_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_updaters', to=settings.AUTH_USER_MODEL, verbose_name='Updated by'),
),
]
|
<commit_before><commit_msg>Add publicbody migration for on_delete<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-30 00:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0003_auto_20160123_1336'),
]
operations = [
migrations.AlterField(
model_name='publicbody',
name='_created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_creators', to=settings.AUTH_USER_MODEL, verbose_name='Created by'),
),
migrations.AlterField(
model_name='publicbody',
name='_updated_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_updaters', to=settings.AUTH_USER_MODEL, verbose_name='Updated by'),
),
]
|
Add publicbody migration for on_delete# -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-30 00:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0003_auto_20160123_1336'),
]
operations = [
migrations.AlterField(
model_name='publicbody',
name='_created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_creators', to=settings.AUTH_USER_MODEL, verbose_name='Created by'),
),
migrations.AlterField(
model_name='publicbody',
name='_updated_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_updaters', to=settings.AUTH_USER_MODEL, verbose_name='Updated by'),
),
]
|
<commit_before><commit_msg>Add publicbody migration for on_delete<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-11-30 00:28
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('publicbody', '0003_auto_20160123_1336'),
]
operations = [
migrations.AlterField(
model_name='publicbody',
name='_created_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_creators', to=settings.AUTH_USER_MODEL, verbose_name='Created by'),
),
migrations.AlterField(
model_name='publicbody',
name='_updated_by',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='public_body_updaters', to=settings.AUTH_USER_MODEL, verbose_name='Updated by'),
),
]
|
|
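For context on the migration above: Django only accepts on_delete=SET_NULL on nullable columns, because deleting the referenced user has to leave a NULL behind, which is why null=True travels with each AlterField. A paraphrased sketch of the matching model field, not the project's actual models.py:

from django.conf import settings
from django.db import models

class PublicBody(models.Model):
    # SET_NULL requires null=True: deleting the user writes NULL here
    # instead of cascading the delete onto the public body row.
    _created_by = models.ForeignKey(
        settings.AUTH_USER_MODEL, blank=True, null=True,
        on_delete=models.SET_NULL, related_name='public_body_creators',
        verbose_name='Created by')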
c19302a9171c91dfd0bba28582253124aa4a71eb
|
lab/disruptors/n9k_ip_block.py
|
lab/disruptors/n9k_ip_block.py
|
def start(context, log, args):
import time
duration = args['duration']
period = 20
n9k1_ip, n9k2_ip, _, _ = context.n9k_creds()
log.info('Blocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
start_time = time.time()
for controller in context.controllers():
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
while start_time + duration > time.time():
log.info('N9K IPs are blocked.')
time.sleep(period)
log.info('Unblocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
for controller in context.controllers():
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
|
Add n9k IP iptables block
|
Add n9k IP iptables block
Change-Id: Ife7168aebd5de6d017c48d3d7b6b1a0227c25f7c
|
Python
|
apache-2.0
|
CiscoSystems/os-sqe,CiscoSystems/os-sqe,CiscoSystems/os-sqe
|
Add n9k IP iptables block
Change-Id: Ife7168aebd5de6d017c48d3d7b6b1a0227c25f7c
|
def start(context, log, args):
import time
duration = args['duration']
period = 20
n9k1_ip, n9k2_ip, _, _ = context.n9k_creds()
log.info('Blocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
start_time = time.time()
for controller in context.controllers():
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
while start_time + duration > time.time():
log.info('N9K IPs are blocked.')
time.sleep(period)
log.info('Unblocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
for controller in context.controllers():
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
|
<commit_before><commit_msg>Add n9k IP iptables block
Change-Id: Ife7168aebd5de6d017c48d3d7b6b1a0227c25f7c<commit_after>
|
def start(context, log, args):
import time
duration = args['duration']
period = 20
n9k1_ip, n9k2_ip, _, _ = context.n9k_creds()
log.info('Blocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
start_time = time.time()
for controller in context.controllers():
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
while start_time + duration > time.time():
log.info('N9K IPs are blocked.')
time.sleep(period)
log.info('Unblocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
for controller in context.controllers():
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
|
Add n9k IP iptables block
Change-Id: Ife7168aebd5de6d017c48d3d7b6b1a0227c25f7cdef start(context, log, args):
import time
duration = args['duration']
period = 20
n9k1_ip, n9k2_ip, _, _ = context.n9k_creds()
log.info('Blocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
start_time = time.time()
for controller in context.controllers():
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
while start_time + duration > time.time():
log.info('N9K IPs are blocked.')
time.sleep(period)
log.info('Unblocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
for controller in context.controllers():
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
|
<commit_before><commit_msg>Add n9k IP iptables block
Change-Id: Ife7168aebd5de6d017c48d3d7b6b1a0227c25f7c<commit_after>def start(context, log, args):
import time
duration = args['duration']
period = 20
n9k1_ip, n9k2_ip, _, _ = context.n9k_creds()
log.info('Blocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
start_time = time.time()
for controller in context.controllers():
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
while start_time + duration > time.time():
log.info('N9K IPs are blocked.')
time.sleep(period)
log.info('Unblocking N9K IPs ({0},{1}) on controllers ...'.format(n9k1_ip, n9k2_ip))
for controller in context.controllers():
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k1_ip))
controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(n9k2_ip))
|
|
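One caveat with the disruptor above: if the sleep loop raises, or the run is interrupted, the DROP rules are never deleted and the controllers stay cut off from the N9Ks. A hedged hardening using only the context and controller calls already shown would wrap the block/unblock pair in try/finally:

# Sketch: same iptables commands as above, but cleanup is guaranteed.
try:
    for controller in context.controllers():
        for ip in (n9k1_ip, n9k2_ip):
            controller.run(command='iptables -A OUTPUT -d {0}/32 -j DROP'.format(ip))
    while start_time + duration > time.time():
        log.info('N9K IPs are blocked.')
        time.sleep(period)
finally:
    for controller in context.controllers():
        for ip in (n9k1_ip, n9k2_ip):
            controller.run(command='iptables -D OUTPUT -d {0}/32 -j DROP'.format(ip))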
3ca9df7a41653f6c4c8381580b104478c794564f
|
zou/migrations/versions/a65bdadbae2f_.py
|
zou/migrations/versions/a65bdadbae2f_.py
|
"""empty message
Revision ID: a65bdadbae2f
Revises: e1ef93f40d3d
Create Date: 2019-01-15 12:19:59.813805
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'a65bdadbae2f'
down_revision = 'e1ef93f40d3d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('metadata_descriptor',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('entity_type', sa.String(length=60), nullable=False),
sa.Column('name', sa.String(length=120), nullable=False),
sa.Column('field_name', sa.String(length=120), nullable=False),
sa.Column('choices', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('project_id', 'entity_type', 'name', name='metadata_descriptor_uc')
)
op.create_index(op.f('ix_metadata_descriptor_entity_type'), 'metadata_descriptor', ['entity_type'], unique=False)
op.create_index(op.f('ix_metadata_descriptor_project_id'), 'metadata_descriptor', ['project_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_metadata_descriptor_project_id'), table_name='metadata_descriptor')
op.drop_index(op.f('ix_metadata_descriptor_entity_type'), table_name='metadata_descriptor')
op.drop_table('metadata_descriptor')
# ### end Alembic commands ###
|
Add migration file for metadata descriptor
|
Add migration file for metadata descriptor
|
Python
|
agpl-3.0
|
cgwire/zou
|
Add migration file for metadata descriptor
|
"""empty message
Revision ID: a65bdadbae2f
Revises: e1ef93f40d3d
Create Date: 2019-01-15 12:19:59.813805
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'a65bdadbae2f'
down_revision = 'e1ef93f40d3d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('metadata_descriptor',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('entity_type', sa.String(length=60), nullable=False),
sa.Column('name', sa.String(length=120), nullable=False),
sa.Column('field_name', sa.String(length=120), nullable=False),
sa.Column('choices', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('project_id', 'entity_type', 'name', name='metadata_descriptor_uc')
)
op.create_index(op.f('ix_metadata_descriptor_entity_type'), 'metadata_descriptor', ['entity_type'], unique=False)
op.create_index(op.f('ix_metadata_descriptor_project_id'), 'metadata_descriptor', ['project_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_metadata_descriptor_project_id'), table_name='metadata_descriptor')
op.drop_index(op.f('ix_metadata_descriptor_entity_type'), table_name='metadata_descriptor')
op.drop_table('metadata_descriptor')
# ### end Alembic commands ###
|
<commit_before><commit_msg>Add migration file for metadata descriptor<commit_after>
|
"""empty message
Revision ID: a65bdadbae2f
Revises: e1ef93f40d3d
Create Date: 2019-01-15 12:19:59.813805
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'a65bdadbae2f'
down_revision = 'e1ef93f40d3d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('metadata_descriptor',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('entity_type', sa.String(length=60), nullable=False),
sa.Column('name', sa.String(length=120), nullable=False),
sa.Column('field_name', sa.String(length=120), nullable=False),
sa.Column('choices', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('project_id', 'entity_type', 'name', name='metadata_descriptor_uc')
)
op.create_index(op.f('ix_metadata_descriptor_entity_type'), 'metadata_descriptor', ['entity_type'], unique=False)
op.create_index(op.f('ix_metadata_descriptor_project_id'), 'metadata_descriptor', ['project_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_metadata_descriptor_project_id'), table_name='metadata_descriptor')
op.drop_index(op.f('ix_metadata_descriptor_entity_type'), table_name='metadata_descriptor')
op.drop_table('metadata_descriptor')
# ### end Alembic commands ###
|
Add migration file for metadata descriptor"""empty message
Revision ID: a65bdadbae2f
Revises: e1ef93f40d3d
Create Date: 2019-01-15 12:19:59.813805
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'a65bdadbae2f'
down_revision = 'e1ef93f40d3d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('metadata_descriptor',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('entity_type', sa.String(length=60), nullable=False),
sa.Column('name', sa.String(length=120), nullable=False),
sa.Column('field_name', sa.String(length=120), nullable=False),
sa.Column('choices', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('project_id', 'entity_type', 'name', name='metadata_descriptor_uc')
)
op.create_index(op.f('ix_metadata_descriptor_entity_type'), 'metadata_descriptor', ['entity_type'], unique=False)
op.create_index(op.f('ix_metadata_descriptor_project_id'), 'metadata_descriptor', ['project_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_metadata_descriptor_project_id'), table_name='metadata_descriptor')
op.drop_index(op.f('ix_metadata_descriptor_entity_type'), table_name='metadata_descriptor')
op.drop_table('metadata_descriptor')
# ### end Alembic commands ###
|
<commit_before><commit_msg>Add migration file for metadata descriptor<commit_after>"""empty message
Revision ID: a65bdadbae2f
Revises: e1ef93f40d3d
Create Date: 2019-01-15 12:19:59.813805
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
from sqlalchemy.dialects import postgresql
import uuid
# revision identifiers, used by Alembic.
revision = 'a65bdadbae2f'
down_revision = 'e1ef93f40d3d'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('metadata_descriptor',
sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=False),
sa.Column('entity_type', sa.String(length=60), nullable=False),
sa.Column('name', sa.String(length=120), nullable=False),
sa.Column('field_name', sa.String(length=120), nullable=False),
sa.Column('choices', postgresql.JSONB(astext_type=sa.Text()), nullable=True),
sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('project_id', 'entity_type', 'name', name='metadata_descriptor_uc')
)
op.create_index(op.f('ix_metadata_descriptor_entity_type'), 'metadata_descriptor', ['entity_type'], unique=False)
op.create_index(op.f('ix_metadata_descriptor_project_id'), 'metadata_descriptor', ['project_id'], unique=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_metadata_descriptor_project_id'), table_name='metadata_descriptor')
op.drop_index(op.f('ix_metadata_descriptor_entity_type'), table_name='metadata_descriptor')
op.drop_table('metadata_descriptor')
# ### end Alembic commands ###
|
|
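To make the shape of the new table above concrete: it pins down a (project_id, entity_type, name) unique triple plus a JSONB choices column. A hedged sketch of seeding one descriptor with Alembic's bulk_insert; the table stub mirrors the create_table columns and every value is invented:

import uuid
import sqlalchemy as sa
from alembic import op

# Sketch: one seeded row; 'choices' rides in the JSONB column and the
# (project_id, entity_type, name) triple must stay unique per the constraint.
metadata_descriptor = sa.table(
    'metadata_descriptor',
    sa.column('id'), sa.column('project_id'), sa.column('entity_type'),
    sa.column('name'), sa.column('field_name'), sa.column('choices'))
op.bulk_insert(metadata_descriptor, [{
    'id': uuid.uuid4(),
    'project_id': uuid.uuid4(),  # a real project id in practice
    'entity_type': 'Shot',
    'name': 'Complexity',
    'field_name': 'complexity',
    'choices': ['easy', 'medium', 'hard'],
}])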
7930d0b328fff6edef376c8b6401c5bb7c241311
|
tests/test_lexer.py
|
tests/test_lexer.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2012 Raphaël Barrois
"""Tests for token-related code."""
import re
import unittest
import tdparser
class ParenthesizedLexerTestCase(unittest.TestCase):
"""Test lexing parenthesized expressions."""
def setUp(self):
class RightParen(tdparser.Token):
"""A right parenthesis"""
pass
class LeftParen(tdparser.Token):
"""A left parenthesis"""
def nud(self, context):
"""Read the content of the (...) block."""
# Contains parsed data.
contents = []
while not isinstance(context.current_token, RightParen):
contents.append(context.expression())
next_token = context.consume(RightParen)
return [self.text] + contents + [next_token.text]
l = tdparser.Lexer(default_tokens=False)
l.register_token(LeftParen, re.compile(r'\('))
l.register_token(RightParen, re.compile(r'\)'))
self.lexer = l
def test_trivial(self):
expr = self.lexer.parse('()')
self.assertEqual(['(', ')'], expr)
def test_reads_a_single_expression(self):
expr = self.lexer.parse('()()')
self.assertEqual(['(', ')'], expr)
def test_nested(self):
expr = self.lexer.parse('(())')
self.assertEqual(['(', ['(', ')'], ')'], expr)
def test_chained_nested(self):
expr = self.lexer.parse('(()())')
self.assertEqual(['(', ['(', ')'], ['(', ')'], ')'], expr)
def test_complex(self):
expr = self.lexer.parse('(((()())())()(()(())(()()()()())))')
self.assertEqual(
['(',
['(',
['(',
['(', ')'],
['(', ')'],
')'],
['(', ')'],
')'],
['(', ')'],
['(',
['(', ')'],
['(',
['(', ')'],
')'],
['(',
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
')'],
')'],
')'], expr)
|
Add tests for the lexer.
|
Add tests for the lexer.
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polytechnique.org>
|
Python
|
bsd-2-clause
|
rbarrois/tdparser,rbarrois/tdparser
|
Add tests for the lexer.
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polytechnique.org>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2012 Raphaël Barrois
"""Tests for token-related code."""
import re
import unittest
import tdparser
class ParenthesizedLexerTestCase(unittest.TestCase):
"""Test lexing parenthesized expressions."""
def setUp(self):
class RightParen(tdparser.Token):
"""A right parenthesis"""
pass
class LeftParen(tdparser.Token):
"""A left parenthesis"""
def nud(self, context):
"""Read the content of the (...) block."""
# Contains parsed data.
contents = []
while not isinstance(context.current_token, RightParen):
contents.append(context.expression())
next_token = context.consume(RightParen)
return [self.text] + contents + [next_token.text]
l = tdparser.Lexer(default_tokens=False)
l.register_token(LeftParen, re.compile(r'\('))
l.register_token(RightParen, re.compile(r'\)'))
self.lexer = l
def test_trivial(self):
expr = self.lexer.parse('()')
self.assertEqual(['(', ')'], expr)
def test_reads_a_single_expression(self):
expr = self.lexer.parse('()()')
self.assertEqual(['(', ')'], expr)
def test_nested(self):
expr = self.lexer.parse('(())')
self.assertEqual(['(', ['(', ')'], ')'], expr)
def test_chained_nested(self):
expr = self.lexer.parse('(()())')
self.assertEqual(['(', ['(', ')'], ['(', ')'], ')'], expr)
def test_complex(self):
expr = self.lexer.parse('(((()())())()(()(())(()()()()())))')
self.assertEqual(
['(',
['(',
['(',
['(', ')'],
['(', ')'],
')'],
['(', ')'],
')'],
['(', ')'],
['(',
['(', ')'],
['(',
['(', ')'],
')'],
['(',
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
')'],
')'],
')'], expr)
|
<commit_before><commit_msg>Add tests for the lexer.
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polytechnique.org><commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2012 Raphaël Barrois
"""Tests for token-related code."""
import re
import unittest
import tdparser
class ParenthesizedLexerTestCase(unittest.TestCase):
"""Test lexing parenthesized expressions."""
def setUp(self):
class RightParen(tdparser.Token):
"""A right parenthesis"""
pass
class LeftParen(tdparser.Token):
"""A left parenthesis"""
def nud(self, context):
"""Read the content of the (...) block."""
# Contains parsed data.
contents = []
while not isinstance(context.current_token, RightParen):
contents.append(context.expression())
next_token = context.consume(RightParen)
return [self.text] + contents + [next_token.text]
l = tdparser.Lexer(default_tokens=False)
l.register_token(LeftParen, re.compile(r'\('))
l.register_token(RightParen, re.compile(r'\)'))
self.lexer = l
def test_trivial(self):
expr = self.lexer.parse('()')
self.assertEqual(['(', ')'], expr)
def test_reads_a_single_expression(self):
expr = self.lexer.parse('()()')
self.assertEqual(['(', ')'], expr)
def test_nested(self):
expr = self.lexer.parse('(())')
self.assertEqual(['(', ['(', ')'], ')'], expr)
def test_chained_nested(self):
expr = self.lexer.parse('(()())')
self.assertEqual(['(', ['(', ')'], ['(', ')'], ')'], expr)
def test_complex(self):
expr = self.lexer.parse('(((()())())()(()(())(()()()()())))')
self.assertEqual(
['(',
['(',
['(',
['(', ')'],
['(', ')'],
')'],
['(', ')'],
')'],
['(', ')'],
['(',
['(', ')'],
['(',
['(', ')'],
')'],
['(',
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
')'],
')'],
')'], expr)
|
Add tests for the lexer.
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polytechnique.org>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2012 Raphaël Barrois
"""Tests for token-related code."""
import re
import unittest
import tdparser
class ParenthesizedLexerTestCase(unittest.TestCase):
"""Test lexing parenthesized expressions."""
def setUp(self):
class RightParen(tdparser.Token):
"""A right parenthesis"""
pass
class LeftParen(tdparser.Token):
"""A left parenthesis"""
def nud(self, context):
"""Read the content of the (...) block."""
# Contains parsed data.
contents = []
while not isinstance(context.current_token, RightParen):
contents.append(context.expression())
next_token = context.consume(RightParen)
return [self.text] + contents + [next_token.text]
l = tdparser.Lexer(default_tokens=False)
l.register_token(LeftParen, re.compile(r'\('))
l.register_token(RightParen, re.compile(r'\)'))
self.lexer = l
def test_trivial(self):
expr = self.lexer.parse('()')
self.assertEqual(['(', ')'], expr)
def test_reads_a_single_expression(self):
expr = self.lexer.parse('()()')
self.assertEqual(['(', ')'], expr)
def test_nested(self):
expr = self.lexer.parse('(())')
self.assertEqual(['(', ['(', ')'], ')'], expr)
def test_chained_nested(self):
expr = self.lexer.parse('(()())')
self.assertEqual(['(', ['(', ')'], ['(', ')'], ')'], expr)
def test_complex(self):
expr = self.lexer.parse('(((()())())()(()(())(()()()()())))')
self.assertEqual(
['(',
['(',
['(',
['(', ')'],
['(', ')'],
')'],
['(', ')'],
')'],
['(', ')'],
['(',
['(', ')'],
['(',
['(', ')'],
')'],
['(',
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
')'],
')'],
')'], expr)
|
<commit_before><commit_msg>Add tests for the lexer.
Signed-off-by: Raphaël Barrois <8eb3b37a023209373fcd61a2fdc08256a14fb19c@polytechnique.org><commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010-2012 Raphaël Barrois
"""Tests for token-related code."""
import re
import unittest
import tdparser
class ParenthesizedLexerTestCase(unittest.TestCase):
"""Test lexing parenthesized expressions."""
def setUp(self):
class RightParen(tdparser.Token):
"""A right parenthesis"""
pass
class LeftParen(tdparser.Token):
"""A left parenthesis"""
def nud(self, context):
"""Read the content of the (...) block."""
# Contains parsed data.
contents = []
while not isinstance(context.current_token, RightParen):
contents.append(context.expression())
next_token = context.consume(RightParen)
return [self.text] + contents + [next_token.text]
l = tdparser.Lexer(default_tokens=False)
l.register_token(LeftParen, re.compile(r'\('))
l.register_token(RightParen, re.compile(r'\)'))
self.lexer = l
def test_trivial(self):
expr = self.lexer.parse('()')
self.assertEqual(['(', ')'], expr)
def test_reads_a_single_expression(self):
expr = self.lexer.parse('()()')
self.assertEqual(['(', ')'], expr)
def test_nested(self):
expr = self.lexer.parse('(())')
self.assertEqual(['(', ['(', ')'], ')'], expr)
def test_chained_nested(self):
expr = self.lexer.parse('(()())')
self.assertEqual(['(', ['(', ')'], ['(', ')'], ')'], expr)
def test_complex(self):
expr = self.lexer.parse('(((()())())()(()(())(()()()()())))')
self.assertEqual(
['(',
['(',
['(',
['(', ')'],
['(', ')'],
')'],
['(', ')'],
')'],
['(', ')'],
['(',
['(', ')'],
['(',
['(', ')'],
')'],
['(',
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
['(', ')'],
')'],
')'],
')'], expr)
|
|
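A note on the lexer tests above: the nud hook on LeftParen turns each '(...)' group into a nested Python list, so structural assertions reduce to plain list processing. As one illustration, ordinary Python over the parser's output with no tdparser API involved, a depth helper for the nested result:

def depth(expr):
    # Nesting depth of the ['(', ..., ')'] lists the lexer produces.
    inner = [depth(e) for e in expr if isinstance(e, list)]
    return 1 + (max(inner) if inner else 0)

# With the ParenthesizedLexerTestCase setup above:
# depth(lexer.parse('(())')) == 2
# depth(lexer.parse('(()())')) == 2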
e8b48ef94fbffea54ba39b6397d283b67d49ced0
|
cameo/visualization/sympy_ext.py
|
cameo/visualization/sympy_ext.py
|
# Copyright 2015 Novo Nordisk Foundation Center for Biosustainability, DTU.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sympy import And, Or
def _str_or(self):
return "(" + " OR ".join([str(v) for v in self.args]) + ")"
Or.__str__ = _str_or
def _str_and(self):
return "(" + " AND ".join([str(v) for v in self.args]) + ")"
And.__str__ = _str_and
|
Patch sympy to return COBRA compatible expressions on str
|
Patch sympy to return COBRA compatible expressions on str
|
Python
|
apache-2.0
|
biosustain/cameo,biosustain/cameo
|
Patch sympy to return COBRA compatible expressions on str
|
# Copyright 2015 Novo Nordisk Foundation Center for Biosustainability, DTU.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sympy import And, Or
def _str_or(self):
return "(" + " OR ".join([str(v) for v in self.args]) + ")"
Or.__str__ = _str_or
def _str_and(self):
return "(" + " AND ".join([str(v) for v in self.args]) + ")"
And.__str__ = _str_and
|
<commit_before><commit_msg>Patch sympy to return COBRA compatible expressions on str<commit_after>
|
# Copyright 2015 Novo Nordisk Foundation Center for Biosustainability, DTU.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sympy import And, Or
def _str_or(self):
return "(" + " OR ".join([str(v) for v in self.args]) + ")"
Or.__str__ = _str_or
def _str_and(self):
return "(" + " AND ".join([str(v) for v in self.args]) + ")"
And.__str__ = _str_and
|
Patch sympy to return COBRA compatible expressions on str# Copyright 2015 Novo Nordisk Foundation Center for Biosustainability, DTU.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sympy import And, Or
def _str_or(self):
return "(" + " OR ".join([str(v) for v in self.args]) + ")"
Or.__str__ = _str_or
def _str_and(self):
return "(" + " AND ".join([str(v) for v in self.args]) + ")"
And.__str__ = _str_and
|
<commit_before><commit_msg>Patch sympy to return COBRA compatible expressions on str<commit_after># Copyright 2015 Novo Nordisk Foundation Center for Biosustainability, DTU.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sympy import And, Or
def _str_or(self):
return "(" + " OR ".join([str(v) for v in self.args]) + ")"
Or.__str__ = _str_or
def _str_and(self):
return "(" + " AND ".join([str(v) for v in self.args]) + ")"
And.__str__ = _str_and
|
|
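The effect of the sympy patch above is easiest to see on a small gene-rule-style expression. A hedged example with invented symbol names; note that sympy canonicalizes argument order, so the exact ordering inside the parentheses is not guaranteed:

from sympy import And, Or, symbols

g1, g2, g3 = symbols('g1 g2 g3')

# With the patched __str__ methods this prints COBRA-style boolean syntax,
# something like "((g1 AND g2) OR g3)" rather than sympy's default notation.
print(str(Or(And(g1, g2), g3)))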
7980fdf9dbc31072ed2659c4d9b3392a1c31917b
|
tests/integration/dropdown/test_dynamic_options.py
|
tests/integration/dropdown/test_dynamic_options.py
|
import dash
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import dash.testing.wait as wait
import dash_core_components as dcc
def test_ddsv001_dynamic_options(dash_duo):
    options = [
{'label': 'New York City', 'value': 'NYC'},
{'label': 'Montreal', 'value': 'MTL'},
{'label': 'San Francisco', 'value': 'SF'}
]
app = dash.Dash(__name__)
app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[])
@app.callback(
dash.dependencies.Output('my-dynamic-dropdown', 'options'),
[dash.dependencies.Input('my-dynamic-dropdown', 'search_value')],
)
def update_options(search_value):
if not search_value:
raise PreventUpdate
return [o for o in options if search_value in o['label']]
dash_duo.start_server(app)
# Get the inner input used for search value.
dropdown = dash_duo.find_element("#my-dynamic-dropdown")
input_ = dropdown.find_element_by_css_selector("input")
# Focus on the input to open the options menu
input_.send_keys("x")
#No options to be found with `x` in them, should show the empty message.
dash_duo.wait_for_text_to_equal(".Select-noresults", "No results found")
input_.clear()
input_.send_keys("o")
    # Use a different name so we don't rebind the `options` list that the
    # update_options callback closes over.
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    # Should show all options.
    assert len(shown) == 3
    # Searching for `on`
    input_.send_keys("n")
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    assert len(shown) == 1
    assert shown[0].text == "Montreal"
|
Add test for dynamic options from server
|
Add test for dynamic options from server
|
Python
|
mit
|
plotly/dash-core-components
|
Add test for dynamic options from server
|
import dash
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import dash.testing.wait as wait
import dash_core_components as dcc
def test_ddsv001_dynamic_options(dash_duo):
    options = [
{'label': 'New York City', 'value': 'NYC'},
{'label': 'Montreal', 'value': 'MTL'},
{'label': 'San Francisco', 'value': 'SF'}
]
app = dash.Dash(__name__)
app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[])
@app.callback(
dash.dependencies.Output('my-dynamic-dropdown', 'options'),
[dash.dependencies.Input('my-dynamic-dropdown', 'search_value')],
)
def update_options(search_value):
if not search_value:
raise PreventUpdate
return [o for o in options if search_value in o['label']]
dash_duo.start_server(app)
# Get the inner input used for search value.
dropdown = dash_duo.find_element("#my-dynamic-dropdown")
input_ = dropdown.find_element_by_css_selector("input")
# Focus on the input to open the options menu
input_.send_keys("x")
#No options to be found with `x` in them, should show the empty message.
dash_duo.wait_for_text_to_equal(".Select-noresults", "No results found")
input_.clear()
input_.send_keys("o")
    # Use a different name so we don't rebind the `options` list that the
    # update_options callback closes over.
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    # Should show all options.
    assert len(shown) == 3
    # Searching for `on`
    input_.send_keys("n")
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    assert len(shown) == 1
    assert shown[0].text == "Montreal"
|
<commit_before><commit_msg>Add test for dynamic options from server<commit_after>
|
import dash
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import dash.testing.wait as wait
import dash_core_components as dcc
def test_ddsv001_dynamic_options(dash_duo):
    options = [
{'label': 'New York City', 'value': 'NYC'},
{'label': 'Montreal', 'value': 'MTL'},
{'label': 'San Francisco', 'value': 'SF'}
]
app = dash.Dash(__name__)
app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[])
@app.callback(
dash.dependencies.Output('my-dynamic-dropdown', 'options'),
[dash.dependencies.Input('my-dynamic-dropdown', 'search_value')],
)
def update_options(search_value):
if not search_value:
raise PreventUpdate
return [o for o in options if search_value in o['label']]
dash_duo.start_server(app)
# Get the inner input used for search value.
dropdown = dash_duo.find_element("#my-dynamic-dropdown")
input_ = dropdown.find_element_by_css_selector("input")
# Focus on the input to open the options menu
input_.send_keys("x")
#No options to be found with `x` in them, should show the empty message.
dash_duo.wait_for_text_to_equal(".Select-noresults", "No results found")
input_.clear()
input_.send_keys("o")
    # Use a different name so we don't rebind the `options` list that the
    # update_options callback closes over.
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    # Should show all options.
    assert len(shown) == 3
    # Searching for `on`
    input_.send_keys("n")
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    assert len(shown) == 1
    assert shown[0].text == "Montreal"
|
Add test for dynamic options from serverimport dash
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import dash.testing.wait as wait
import dash_core_components as dcc
def test_ddsv001_dynamic_options(dash_duo):
    options = [
{'label': 'New York City', 'value': 'NYC'},
{'label': 'Montreal', 'value': 'MTL'},
{'label': 'San Francisco', 'value': 'SF'}
]
app = dash.Dash(__name__)
app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[])
@app.callback(
dash.dependencies.Output('my-dynamic-dropdown', 'options'),
[dash.dependencies.Input('my-dynamic-dropdown', 'search_value')],
)
def update_options(search_value):
if not search_value:
raise PreventUpdate
return [o for o in options if search_value in o['label']]
dash_duo.start_server(app)
# Get the inner input used for search value.
dropdown = dash_duo.find_element("#my-dynamic-dropdown")
input_ = dropdown.find_element_by_css_selector("input")
# Focus on the input to open the options menu
input_.send_keys("x")
#No options to be found with `x` in them, should show the empty message.
dash_duo.wait_for_text_to_equal(".Select-noresults", "No results found")
input_.clear()
input_.send_keys("o")
    # Use a different name so we don't rebind the `options` list that the
    # update_options callback closes over.
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    # Should show all options.
    assert len(shown) == 3
    # Searching for `on`
    input_.send_keys("n")
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    assert len(shown) == 1
    assert shown[0].text == "Montreal"
|
<commit_before><commit_msg>Add test for dynamic options from server<commit_after>import dash
from dash.dependencies import Input, Output, State
from dash.exceptions import PreventUpdate
import dash.testing.wait as wait
import dash_core_components as dcc
def test_ddsv001_dynamic_options(dash_duo):
    options = [
{'label': 'New York City', 'value': 'NYC'},
{'label': 'Montreal', 'value': 'MTL'},
{'label': 'San Francisco', 'value': 'SF'}
]
app = dash.Dash(__name__)
app.layout = dcc.Dropdown(id="my-dynamic-dropdown", options=[])
@app.callback(
dash.dependencies.Output('my-dynamic-dropdown', 'options'),
[dash.dependencies.Input('my-dynamic-dropdown', 'search_value')],
)
def update_options(search_value):
if not search_value:
raise PreventUpdate
return [o for o in options if search_value in o['label']]
dash_duo.start_server(app)
# Get the inner input used for search value.
dropdown = dash_duo.find_element("#my-dynamic-dropdown")
input_ = dropdown.find_element_by_css_selector("input")
# Focus on the input to open the options menu
input_.send_keys("x")
#No options to be found with `x` in them, should show the empty message.
dash_duo.wait_for_text_to_equal(".Select-noresults", "No results found")
input_.clear()
input_.send_keys("o")
    # Use a different name so we don't rebind the `options` list that the
    # update_options callback closes over.
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    # Should show all options.
    assert len(shown) == 3
    # Searching for `on`
    input_.send_keys("n")
    shown = dropdown.find_elements_by_css_selector(".VirtualizedSelectOption")
    assert len(shown) == 1
    assert shown[0].text == "Montreal"
|
|
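One detail worth noting about the callback under test above: the filter search_value in o['label'] is case sensitive, so typing 'mont' would never match 'Montreal'. A hedged, case-insensitive variant of the same callback body, keeping the Dash callback signature shown above:

def update_options(search_value):
    if not search_value:
        raise PreventUpdate
    needle = search_value.lower()
    return [o for o in options if needle in o['label'].lower()]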
50f440f3c7dbfaa328e6cd16e762ec41a9147f54
|
raggregate/rg_migrations/versions/011_Add_notify_by_mail_to_users.py
|
raggregate/rg_migrations/versions/011_Add_notify_by_mail_to_users.py
|
from sqlalchemy import *
from migrate import *
from raggregate.guid_recipe import GUID
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
notify_by_mailc = Column('notify_by_mail', Boolean, default=True)
notify_by_mailc.create(users)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
users.c.notify_by_mail.drop()
|
Include migration for notify_by_mail. ...
|
notify: Include migration for notify_by_mail. ...
Required for commit 651c0f984f7c75 or later to work.
|
Python
|
apache-2.0
|
sjuxax/raggregate
|
notify: Include migration for notify_by_mail. ...
Required for commit 651c0f984f7c75 or later to work.
|
from sqlalchemy import *
from migrate import *
from raggregate.guid_recipe import GUID
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
notify_by_mailc = Column('notify_by_mail', Boolean, default=True)
notify_by_mailc.create(users)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
users.c.notify_by_mail.drop()
|
<commit_before><commit_msg>notify: Include migration for notify_by_mail. ...
Required for commit 651c0f984f7c75 or later to work.<commit_after>
|
from sqlalchemy import *
from migrate import *
from raggregate.guid_recipe import GUID
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
notify_by_mailc = Column('notify_by_mail', Boolean, default=True)
notify_by_mailc.create(users)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
users.c.notify_by_mail.drop()
|
notify: Include migration for notify_by_mail. ...
Required for commit 651c0f984f7c75 or later to work.from sqlalchemy import *
from migrate import *
from raggregate.guid_recipe import GUID
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
notify_by_mailc = Column('notify_by_mail', Boolean, default=True)
notify_by_mailc.create(users)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
users.c.notify_by_mail.drop()
|
<commit_before><commit_msg>notify: Include migration for notify_by_mail. ...
Required for commit 651c0f984f7c75 or later to work.<commit_after>from sqlalchemy import *
from migrate import *
from raggregate.guid_recipe import GUID
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
notify_by_mailc = Column('notify_by_mail', Boolean, default=True)
notify_by_mailc.create(users)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
users = Table('users', meta, autoload=True)
users.c.notify_by_mail.drop()
|
|
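A caveat with the sqlalchemy-migrate script above: default=True is a Python-side default, so it only applies to rows inserted after the migration, and pre-existing users can be left with NULL depending on the backend. A hedged variant of upgrade that backfills existing rows right after the column lands:

def upgrade(migrate_engine):
    meta = MetaData(bind=migrate_engine)
    users = Table('users', meta, autoload=True)
    notify_by_mailc = Column('notify_by_mail', Boolean, default=True)
    notify_by_mailc.create(users)
    # Sketch: backfill so accounts that predate the column also get mail.
    migrate_engine.execute(users.update().values(notify_by_mail=True))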
d0c8c7b61906e65137057406b5b94a01f9c61b84
|
keepsimplecms/__init__.py
|
keepsimplecms/__init__.py
|
class ViewBase(object):
"""
Base class for all views and nodes.
"""
# database session
_session = None
# the scope is the dict sent to the template engine
_scope = {}
def __init__(self, request=None):
"""
Save a reference to the Pyramid request object.
"""
self._request = request
def __call__(self):
"""
        Make the class a callable function which can be rendered.
"""
self.render()
return self._scope
def scope(self, *arg):
"""
Getter/setter to the scope.
"""
# return a value from the scope
if len(arg) == 1 and isinstance(arg[0], str):
return self._scope.get(arg[0], '')
# set only one value to the scope
elif len(arg) == 2:
self._scope[arg[0]] = arg[1]
# set value(s) to the scope
elif len(arg) == 1 and isinstance(arg[0], dict):
for key, value in arg[0].iteritems():
self._scope[key] = value
def render(self):
"""
Set variables to the scope before rendering.
To be extended by the view.
"""
pass
def node(self, node):
"""
        Instantiate the :py:class:`Node` `node` and return its HTML code.
"""
return node(self._request)()
class View(ViewBase):
"""
Define the default variables in the scope.
"""
_scope = {
'layout': 'templates/layouts/default.html',
'page_title': 'Sample title'
}
class Node(ViewBase):
"""
    A node is a ViewBase child which is not declared as a view. Therefore, a
    node is not mapped to a URL.
    A node represents a part of the HTML page and implements its own logic since
    the request and the DBSession objects are available.
    The page is built from several nodes; each one should be independent and
    can be reused in different views.
    Since no view or route declaration is done for a node, a template must be
    declared.
"""
_template = None
def __call__(self):
self.render()
if not self._template:
return ''
from pyramid.renderers import render
return render(self._template, self._scope, self._request)
|
Move code to the root of the main module.
|
Move code to the root of the main module.
|
Python
|
bsd-3-clause
|
cr0cK/keepsimple.cms,cr0cK/keepsimple.cms,cr0cK/keepsimple.cms
|
Move code to the root of the main module.
|
class ViewBase(object):
"""
Base class for all views and nodes.
"""
# database session
_session = None
# the scope is the dict sent to the template engine
_scope = {}
def __init__(self, request=None):
"""
Save a reference to the Pyramid request object.
"""
self._request = request
def __call__(self):
"""
        Make the class a callable function which can be rendered.
"""
self.render()
return self._scope
def scope(self, *arg):
"""
Getter/setter to the scope.
"""
# return a value from the scope
if len(arg) == 1 and isinstance(arg[0], str):
return self._scope.get(arg[0], '')
# set only one value to the scope
elif len(arg) == 2:
self._scope[arg[0]] = arg[1]
# set value(s) to the scope
elif len(arg) == 1 and isinstance(arg[0], dict):
for key, value in arg[0].iteritems():
self._scope[key] = value
def render(self):
"""
Set variables to the scope before rendering.
To be extended by the view.
"""
pass
def node(self, node):
"""
        Instantiate the :py:class:`Node` `node` and return its HTML code.
"""
return node(self._request)()
class View(ViewBase):
"""
Define the default variables in the scope.
"""
_scope = {
'layout': 'templates/layouts/default.html',
'page_title': 'Sample title'
}
class Node(ViewBase):
"""
    A node is a ViewBase child which is not declared as a view. Therefore, a
    node is not mapped to a URL.
    A node represents a part of the HTML page and implements its own logic since
    the request and the DBSession objects are available.
    The page is built from several nodes; each one should be independent and
    can be reused in different views.
    Since no view or route declaration is done for a node, a template must be
    declared.
"""
_template = None
def __call__(self):
self.render()
if not self._template:
return ''
from pyramid.renderers import render
return render(self._template, self._scope, self._request)
|
<commit_before><commit_msg>Move code to the root of the main module.<commit_after>
|
class ViewBase(object):
    """
    Base class for all views and nodes.
    """
    # database session
    _session = None
    # the scope is the dict sent to the template engine
    _scope = {}

    def __init__(self, request=None):
        """
        Save a reference to the Pyramid request object.
        """
        self._request = request

    def __call__(self):
        """
        Make the class a callable which can be rendered.
        """
        self.render()
        return self._scope

    def scope(self, *arg):
        """
        Getter/setter for the scope.
        """
        # return a value from the scope
        if len(arg) == 1 and isinstance(arg[0], str):
            return self._scope.get(arg[0], '')
        # set only one value to the scope
        elif len(arg) == 2:
            self._scope[arg[0]] = arg[1]
        # set value(s) to the scope
        elif len(arg) == 1 and isinstance(arg[0], dict):
            for key, value in arg[0].iteritems():
                self._scope[key] = value

    def render(self):
        """
        Set variables in the scope before rendering.
        To be extended by the view.
        """
        pass

    def node(self, node):
        """
        Instantiate the :py:class:`Node` `node` and return its HTML code.
        """
        return node(self._request)()


class View(ViewBase):
    """
    Define the default variables in the scope.
    """
    _scope = {
        'layout': 'templates/layouts/default.html',
        'page_title': 'Sample title'
    }


class Node(ViewBase):
    """
    A node is a ViewBase child which is not declared as a view. Therefore, a
    node is not mapped to a URL.
    A node represents a part of the HTML page and implements its own logic,
    since the request and the DBSession objects are available.
    The page is built from several nodes; each one should be independent and
    reusable in different views.
    Since no view or route declaration is done for a node, a template must be
    declared.
    """
    _template = None

    def __call__(self):
        self.render()
        if not self._template:
            return ''
        from pyramid.renderers import render
        return render(self._template, self._scope, self._request)
|
Move code to the root of the main module.
class ViewBase(object):
    """
    Base class for all views and nodes.
    """
    # database session
    _session = None
    # the scope is the dict sent to the template engine
    _scope = {}

    def __init__(self, request=None):
        """
        Save a reference to the Pyramid request object.
        """
        self._request = request

    def __call__(self):
        """
        Make the class a callable which can be rendered.
        """
        self.render()
        return self._scope

    def scope(self, *arg):
        """
        Getter/setter for the scope.
        """
        # return a value from the scope
        if len(arg) == 1 and isinstance(arg[0], str):
            return self._scope.get(arg[0], '')
        # set only one value to the scope
        elif len(arg) == 2:
            self._scope[arg[0]] = arg[1]
        # set value(s) to the scope
        elif len(arg) == 1 and isinstance(arg[0], dict):
            for key, value in arg[0].iteritems():
                self._scope[key] = value

    def render(self):
        """
        Set variables in the scope before rendering.
        To be extended by the view.
        """
        pass

    def node(self, node):
        """
        Instantiate the :py:class:`Node` `node` and return its HTML code.
        """
        return node(self._request)()


class View(ViewBase):
    """
    Define the default variables in the scope.
    """
    _scope = {
        'layout': 'templates/layouts/default.html',
        'page_title': 'Sample title'
    }


class Node(ViewBase):
    """
    A node is a ViewBase child which is not declared as a view. Therefore, a
    node is not mapped to a URL.
    A node represents a part of the HTML page and implements its own logic,
    since the request and the DBSession objects are available.
    The page is built from several nodes; each one should be independent and
    reusable in different views.
    Since no view or route declaration is done for a node, a template must be
    declared.
    """
    _template = None

    def __call__(self):
        self.render()
        if not self._template:
            return ''
        from pyramid.renderers import render
        return render(self._template, self._scope, self._request)
|
<commit_before><commit_msg>Move code to the root of the main module.<commit_after>
class ViewBase(object):
    """
    Base class for all views and nodes.
    """
    # database session
    _session = None
    # the scope is the dict sent to the template engine
    _scope = {}

    def __init__(self, request=None):
        """
        Save a reference to the Pyramid request object.
        """
        self._request = request

    def __call__(self):
        """
        Make the class a callable which can be rendered.
        """
        self.render()
        return self._scope

    def scope(self, *arg):
        """
        Getter/setter for the scope.
        """
        # return a value from the scope
        if len(arg) == 1 and isinstance(arg[0], str):
            return self._scope.get(arg[0], '')
        # set only one value to the scope
        elif len(arg) == 2:
            self._scope[arg[0]] = arg[1]
        # set value(s) to the scope
        elif len(arg) == 1 and isinstance(arg[0], dict):
            for key, value in arg[0].iteritems():
                self._scope[key] = value

    def render(self):
        """
        Set variables in the scope before rendering.
        To be extended by the view.
        """
        pass

    def node(self, node):
        """
        Instantiate the :py:class:`Node` `node` and return its HTML code.
        """
        return node(self._request)()


class View(ViewBase):
    """
    Define the default variables in the scope.
    """
    _scope = {
        'layout': 'templates/layouts/default.html',
        'page_title': 'Sample title'
    }


class Node(ViewBase):
    """
    A node is a ViewBase child which is not declared as a view. Therefore, a
    node is not mapped to a URL.
    A node represents a part of the HTML page and implements its own logic,
    since the request and the DBSession objects are available.
    The page is built from several nodes; each one should be independent and
    reusable in different views.
    Since no view or route declaration is done for a node, a template must be
    declared.
    """
    _template = None

    def __call__(self):
        self.render()
        if not self._template:
            return ''
        from pyramid.renderers import render
        return render(self._template, self._scope, self._request)
|
|
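A minimal usage sketch of the ViewBase/View/Node pattern in the row above; the class names, template path, and scope keys are hypothetical illustrations, not part of the recorded commit. Note also that _scope is a mutable class attribute and, as written, is shared by every instance; a production version would typically copy it per instance in __init__.

class SidebarNode(Node):
    # Hypothetical node: renders one reusable fragment of the page.
    _template = 'templates/nodes/sidebar.html'

    def render(self):
        # Push node-local variables into the scope before rendering.
        self.scope('links', ['/home', '/about'])


class HomeView(View):
    def render(self):
        # A view composes the page from independent nodes; self.node()
        # instantiates the node with the current request and returns its HTML.
        self.scope({'page_title': 'Home',
                    'sidebar': self.node(SidebarNode)})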
9c60d9e5a2a2f64e9bed0a9a6223bc9c886977fd
|
PropertyVerification/HEmpty_IsolatedConnectedLHS.py
|
PropertyVerification/HEmpty_IsolatedConnectedLHS.py
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS


class HEmpty_IsolatedConnectedLHS(HimesisPreConditionPatternLHS):

    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model HEmpty_IsolatedConnectedLHS.
        """
        # Flag this instance as compiled now
        self.is_compiled = True
        super(HEmpty_IsolatedConnectedLHS, self).__init__(name='HEmpty_IsolatedConnectedLHS', num_nodes=0, edges=[])
        # Add the edges
        self.add_edges([])
        # Set the graph attributes
        self["mm__"] = ['MoTifRule']
        self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
        self["name"] = """empty_IsolatedConnected"""
        self["GUID__"] = 6754891666562252613
        # Set the node attributes

    def constraint(self, PreNode, graph):
        """
        Executable constraint code.
        @param PreNode: Function taking an integer as parameter
        and returning the node corresponding to that label.
        """
        #===============================================================================
        # This code is executed after the nodes in the LHS have been matched.
        # You can access a matched node labelled n by: PreNode('n').
        # To access attribute x of node n, use: PreNode('n')['x'].
        # The given constraint must evaluate to a boolean expression:
        # returning True enables the rule to be applied,
        # returning False forbids the rule from being applied.
        #===============================================================================
        return True
|
Add empty property to property verification folder.
|
Add empty property to property verification folder.
|
Python
|
mit
|
levilucio/SyVOLT,levilucio/SyVOLT
|
Add empty property to property verification folder.
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS


class HEmpty_IsolatedConnectedLHS(HimesisPreConditionPatternLHS):

    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model HEmpty_IsolatedConnectedLHS.
        """
        # Flag this instance as compiled now
        self.is_compiled = True
        super(HEmpty_IsolatedConnectedLHS, self).__init__(name='HEmpty_IsolatedConnectedLHS', num_nodes=0, edges=[])
        # Add the edges
        self.add_edges([])
        # Set the graph attributes
        self["mm__"] = ['MoTifRule']
        self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
        self["name"] = """empty_IsolatedConnected"""
        self["GUID__"] = 6754891666562252613
        # Set the node attributes

    def constraint(self, PreNode, graph):
        """
        Executable constraint code.
        @param PreNode: Function taking an integer as parameter
        and returning the node corresponding to that label.
        """
        #===============================================================================
        # This code is executed after the nodes in the LHS have been matched.
        # You can access a matched node labelled n by: PreNode('n').
        # To access attribute x of node n, use: PreNode('n')['x'].
        # The given constraint must evaluate to a boolean expression:
        # returning True enables the rule to be applied,
        # returning False forbids the rule from being applied.
        #===============================================================================
        return True
|
<commit_before><commit_msg>Add empty property to property verification folder.<commit_after>
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS


class HEmpty_IsolatedConnectedLHS(HimesisPreConditionPatternLHS):

    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model HEmpty_IsolatedConnectedLHS.
        """
        # Flag this instance as compiled now
        self.is_compiled = True
        super(HEmpty_IsolatedConnectedLHS, self).__init__(name='HEmpty_IsolatedConnectedLHS', num_nodes=0, edges=[])
        # Add the edges
        self.add_edges([])
        # Set the graph attributes
        self["mm__"] = ['MoTifRule']
        self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
        self["name"] = """empty_IsolatedConnected"""
        self["GUID__"] = 6754891666562252613
        # Set the node attributes

    def constraint(self, PreNode, graph):
        """
        Executable constraint code.
        @param PreNode: Function taking an integer as parameter
        and returning the node corresponding to that label.
        """
        #===============================================================================
        # This code is executed after the nodes in the LHS have been matched.
        # You can access a matched node labelled n by: PreNode('n').
        # To access attribute x of node n, use: PreNode('n')['x'].
        # The given constraint must evaluate to a boolean expression:
        # returning True enables the rule to be applied,
        # returning False forbids the rule from being applied.
        #===============================================================================
        return True
|
Add empty property to property verification folder.
from core.himesis import Himesis, HimesisPreConditionPatternLHS


class HEmpty_IsolatedConnectedLHS(HimesisPreConditionPatternLHS):

    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model HEmpty_IsolatedConnectedLHS.
        """
        # Flag this instance as compiled now
        self.is_compiled = True
        super(HEmpty_IsolatedConnectedLHS, self).__init__(name='HEmpty_IsolatedConnectedLHS', num_nodes=0, edges=[])
        # Add the edges
        self.add_edges([])
        # Set the graph attributes
        self["mm__"] = ['MoTifRule']
        self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
        self["name"] = """empty_IsolatedConnected"""
        self["GUID__"] = 6754891666562252613
        # Set the node attributes

    def constraint(self, PreNode, graph):
        """
        Executable constraint code.
        @param PreNode: Function taking an integer as parameter
        and returning the node corresponding to that label.
        """
        #===============================================================================
        # This code is executed after the nodes in the LHS have been matched.
        # You can access a matched node labelled n by: PreNode('n').
        # To access attribute x of node n, use: PreNode('n')['x'].
        # The given constraint must evaluate to a boolean expression:
        # returning True enables the rule to be applied,
        # returning False forbids the rule from being applied.
        #===============================================================================
        return True
|
<commit_before><commit_msg>Add empty property to property verification folder.<commit_after>
from core.himesis import Himesis, HimesisPreConditionPatternLHS


class HEmpty_IsolatedConnectedLHS(HimesisPreConditionPatternLHS):

    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model HEmpty_IsolatedConnectedLHS.
        """
        # Flag this instance as compiled now
        self.is_compiled = True
        super(HEmpty_IsolatedConnectedLHS, self).__init__(name='HEmpty_IsolatedConnectedLHS', num_nodes=0, edges=[])
        # Add the edges
        self.add_edges([])
        # Set the graph attributes
        self["mm__"] = ['MoTifRule']
        self["MT_constraint__"] = """#===============================================================================
# This code is executed after the nodes in the LHS have been matched.
# You can access a matched node labelled n by: PreNode('n').
# To access attribute x of node n, use: PreNode('n')['x'].
# The given constraint must evaluate to a boolean expression:
# returning True enables the rule to be applied,
# returning False forbids the rule from being applied.
#===============================================================================
return True
"""
        self["name"] = """empty_IsolatedConnected"""
        self["GUID__"] = 6754891666562252613
        # Set the node attributes

    def constraint(self, PreNode, graph):
        """
        Executable constraint code.
        @param PreNode: Function taking an integer as parameter
        and returning the node corresponding to that label.
        """
        #===============================================================================
        # This code is executed after the nodes in the LHS have been matched.
        # You can access a matched node labelled n by: PreNode('n').
        # To access attribute x of node n, use: PreNode('n')['x'].
        # The given constraint must evaluate to a boolean expression:
        # returning True enables the rule to be applied,
        # returning False forbids the rule from being applied.
        #===============================================================================
        return True
|
|
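A toy sketch of how a generated LHS class like the one above might be exercised; the matched-node lookup below is a stand-in for illustration, not the real MoTif/Himesis matching engine. Since this pattern declares zero nodes and edges, it matches trivially and its constraint enables the rule unconditionally.

lhs = HEmpty_IsolatedConnectedLHS()


def pre_node(label):
    # Hypothetical lookup from an LHS label to its matched host-graph node.
    # The empty pattern binds no nodes, so this is never called here.
    raise KeyError(label)


# The compiled constraint ignores its inputs and always returns True.
assert lhs.constraint(pre_node, graph=None)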
864403b982f9a2cb1c4266ebe80ff3ca4d8a572a
|
langcodes/test_multithread.py
|
langcodes/test_multithread.py
|
"""
This file implements testing ing langcodes module for multithreaded env
Problem is still there if you try to acccess that module from multiple places at once
"""
import threading
from twisted.internet import reactor
from tag_parser import parse_tag
def parseMe(i, tag):
print i, parse_tag(tag)
def startProcessing():
for i, tag in enumerate(('en_US', 'en', 'en_gb')):
reactor.callInThread(parseMe, i, tag)
reactor.callInThread(startProcessing)
reactor.run()
|
Test for multithreading with twisted framework
|
Test for multithreading with twisted framework
|
Python
|
mit
|
darkman66/langcodes
|
Test for multithreading with twisted framework
|
"""
This file implements testing ing langcodes module for multithreaded env
Problem is still there if you try to acccess that module from multiple places at once
"""
import threading
from twisted.internet import reactor
from tag_parser import parse_tag
def parseMe(i, tag):
print i, parse_tag(tag)
def startProcessing():
for i, tag in enumerate(('en_US', 'en', 'en_gb')):
reactor.callInThread(parseMe, i, tag)
reactor.callInThread(startProcessing)
reactor.run()
|
<commit_before><commit_msg>Test for multithreading with twisted framework<commit_after>
|
"""
This file implements testing ing langcodes module for multithreaded env
Problem is still there if you try to acccess that module from multiple places at once
"""
import threading
from twisted.internet import reactor
from tag_parser import parse_tag
def parseMe(i, tag):
print i, parse_tag(tag)
def startProcessing():
for i, tag in enumerate(('en_US', 'en', 'en_gb')):
reactor.callInThread(parseMe, i, tag)
reactor.callInThread(startProcessing)
reactor.run()
|
Test for multithreading with twisted framework
"""
This file implements testing of the langcodes module in a multithreaded env.
Problem is still there if you try to access that module from multiple places at once.
"""
import threading

from twisted.internet import reactor

from tag_parser import parse_tag


def parseMe(i, tag):
    print i, parse_tag(tag)


def startProcessing():
    for i, tag in enumerate(('en_US', 'en', 'en_gb')):
        reactor.callInThread(parseMe, i, tag)


reactor.callInThread(startProcessing)
reactor.run()
|
<commit_before><commit_msg>Test for multithreading with twisted framework<commit_after>
"""
This file implements testing of the langcodes module in a multithreaded env.
Problem is still there if you try to access that module from multiple places at once.
"""
import threading

from twisted.internet import reactor

from tag_parser import parse_tag


def parseMe(i, tag):
    print i, parse_tag(tag)


def startProcessing():
    for i, tag in enumerate(('en_US', 'en', 'en_gb')):
        reactor.callInThread(parseMe, i, tag)


reactor.callInThread(startProcessing)
reactor.run()
|
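If parse_tag really is unsafe under concurrent access, as the commit message above suggests, one conventional workaround is to serialize calls with a lock. This is a sketch under that assumption, not part of the recorded commit:

import threading

from tag_parser import parse_tag

_parse_lock = threading.Lock()


def parse_tag_safe(tag):
    # Serialize access to the parser's shared module state.
    with _parse_lock:
        return parse_tag(tag)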