| column | dtype | range |
|---|---|---|
| commit | stringlengths | 40 to 40 |
| old_file | stringlengths | 4 to 118 |
| new_file | stringlengths | 4 to 118 |
| old_contents | stringlengths | 0 to 2.94k |
| new_contents | stringlengths | 1 to 4.43k |
| subject | stringlengths | 15 to 444 |
| message | stringlengths | 16 to 3.45k |
| lang | stringclasses | 1 value |
| license | stringclasses | 13 values |
| repos | stringlengths | 5 to 43.2k |
| prompt | stringlengths | 17 to 4.58k |
| response | stringlengths | 1 to 4.43k |
| prompt_tagged | stringlengths | 58 to 4.62k |
| response_tagged | stringlengths | 1 to 4.43k |
| text | stringlengths | 132 to 7.29k |
| text_tagged | stringlengths | 173 to 7.33k |
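Each row below is one git commit: the file contents before and after the change, the commit subject and message, and the same data re-rendered as prompt/response pairs, both plain and tagged with `<commit_before>`/`<commit_msg>`/`<commit_after>` markers. A minimal sketch of inspecting a dump with this schema, assuming it is hosted on the Hugging Face Hub (the identifier `user/commit-corpus` is a hypothetical placeholder, not the real dataset name):

```python
# Minimal sketch: load a dataset with the schema above and inspect one row.
# Assumption: the dump lives on the Hugging Face Hub; "user/commit-corpus"
# is a hypothetical identifier, not the real one.
from datasets import load_dataset

ds = load_dataset("user/commit-corpus", split="train")
print(ds.column_names)           # commit, old_file, new_file, old_contents, ...

row = ds[0]
print(row["subject"])            # one-line commit subject
print(row["new_contents"][:80])  # file contents after the commit
print(row["text_tagged"][:80])   # prompt + response joined with <commit_*> tags
```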
20f7b0296f0e7139a69f94ca8c80c9ad1b73c011
|
tests/test_package.py
|
tests/test_package.py
|
import argparse
import mock
import os
import tempfile
import unittest
import shutil
import re
import sys
from dogen.plugins.repo import Repo
from dogen.generator import Generator
class TestPackage(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='test_repo_plugin')
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.target_dir = os.path.join(self.workdir, "target")
self.log = mock.Mock()
def tearDown(self):
shutil.rmtree(self.workdir)
def write_config(self, config):
with self.descriptor as f:
f.write(config.encode())
def prepare_dogen(self, repo_files_dir=None):
args = argparse.Namespace(path=self.descriptor.name, output=self.target_dir, without_sources=None,
template=None, scripts_path=None, additional_script=None,
skip_ssl_verification=None, repo_files_dir=repo_files_dir)
self.dogen = Generator(self.log, args, [Repo])
def test_custom_repo_files_should_add_two(self):
open(os.path.join(self.workdir, "fedora.repo"), 'a').close()
open(os.path.join(self.workdir, "test.repo"), 'a').close()
self.write_config("release: '1'\nversion: '1'\ncmd:\n - whoami\nfrom: scratch\nname: someimage\npackages:\n - wget")
self.prepare_dogen(self.workdir)
self.dogen.run()
self.assertIsNotNone(self.dogen.cfg)
self.assertIsNotNone(self.dogen.cfg.get('packages'))
self.assertIsInstance(self.dogen.cfg.get('packages'), list)
self.assertIn("wget", self.dogen.cfg.get('packages'))
dockerfile = open(os.path.join(self.target_dir, "Dockerfile")).read()
sys.stderr.write("\t\t\tDEBUGDEBUG\n{}\n".format(dockerfile))
self.assertTrue(re.match(r'.*yum install[^\n]+wget', dockerfile, re.DOTALL))
self.assertTrue(re.match(r'.*rpm -q +wget', dockerfile, re.DOTALL))
|
Add a test for package generation
|
Add a test for package generation
|
Python
|
mit
|
jboss-dockerfiles/dogen,goldmann/dogen,jboss-container-images/concreate,jboss-container-images/concreate,goldmann/dogen,goldmann/dogen,jboss-container-images/concreate,jboss-dockerfiles/dogen,jboss-dockerfiles/dogen
|
Add a test for package generation
|
import argparse
import mock
import os
import tempfile
import unittest
import shutil
import re
import sys
from dogen.plugins.repo import Repo
from dogen.generator import Generator
class TestPackage(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='test_repo_plugin')
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.target_dir = os.path.join(self.workdir, "target")
self.log = mock.Mock()
def tearDown(self):
shutil.rmtree(self.workdir)
def write_config(self, config):
with self.descriptor as f:
f.write(config.encode())
def prepare_dogen(self, repo_files_dir=None):
args = argparse.Namespace(path=self.descriptor.name, output=self.target_dir, without_sources=None,
template=None, scripts_path=None, additional_script=None,
skip_ssl_verification=None, repo_files_dir=repo_files_dir)
self.dogen = Generator(self.log, args, [Repo])
def test_custom_repo_files_should_add_two(self):
open(os.path.join(self.workdir, "fedora.repo"), 'a').close()
open(os.path.join(self.workdir, "test.repo"), 'a').close()
self.write_config("release: '1'\nversion: '1'\ncmd:\n - whoami\nfrom: scratch\nname: someimage\npackages:\n - wget")
self.prepare_dogen(self.workdir)
self.dogen.run()
self.assertIsNotNone(self.dogen.cfg)
self.assertIsNotNone(self.dogen.cfg.get('packages'))
self.assertIsInstance(self.dogen.cfg.get('packages'), list)
self.assertIn("wget", self.dogen.cfg.get('packages'))
dockerfile = open(os.path.join(self.target_dir, "Dockerfile")).read()
sys.stderr.write("\t\t\tDEBUGDEBUG\n{}\n".format(dockerfile))
self.assertTrue(re.match(r'.*yum install[^\n]+wget', dockerfile, re.DOTALL))
self.assertTrue(re.match(r'.*rpm -q +wget', dockerfile, re.DOTALL))
|
<commit_before><commit_msg>Add a test for package generation<commit_after>
|
import argparse
import mock
import os
import tempfile
import unittest
import shutil
import re
import sys
from dogen.plugins.repo import Repo
from dogen.generator import Generator
class TestPackage(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='test_repo_plugin')
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.target_dir = os.path.join(self.workdir, "target")
self.log = mock.Mock()
def tearDown(self):
shutil.rmtree(self.workdir)
def write_config(self, config):
with self.descriptor as f:
f.write(config.encode())
def prepare_dogen(self, repo_files_dir=None):
args = argparse.Namespace(path=self.descriptor.name, output=self.target_dir, without_sources=None,
template=None, scripts_path=None, additional_script=None,
skip_ssl_verification=None, repo_files_dir=repo_files_dir)
self.dogen = Generator(self.log, args, [Repo])
def test_custom_repo_files_should_add_two(self):
open(os.path.join(self.workdir, "fedora.repo"), 'a').close()
open(os.path.join(self.workdir, "test.repo"), 'a').close()
self.write_config("release: '1'\nversion: '1'\ncmd:\n - whoami\nfrom: scratch\nname: someimage\npackages:\n - wget")
self.prepare_dogen(self.workdir)
self.dogen.run()
self.assertIsNotNone(self.dogen.cfg)
self.assertIsNotNone(self.dogen.cfg.get('packages'))
self.assertIsInstance(self.dogen.cfg.get('packages'), list)
self.assertIn("wget", self.dogen.cfg.get('packages'))
dockerfile = open(os.path.join(self.target_dir, "Dockerfile")).read()
sys.stderr.write("\t\t\tDEBUGDEBUG\n{}\n".format(dockerfile))
self.assertTrue(re.match(r'.*yum install[^\n]+wget', dockerfile, re.DOTALL))
self.assertTrue(re.match(r'.*rpm -q +wget', dockerfile, re.DOTALL))
|
Add a test for package generationimport argparse
import mock
import os
import tempfile
import unittest
import shutil
import re
import sys
from dogen.plugins.repo import Repo
from dogen.generator import Generator
class TestPackage(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='test_repo_plugin')
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.target_dir = os.path.join(self.workdir, "target")
self.log = mock.Mock()
def tearDown(self):
shutil.rmtree(self.workdir)
def write_config(self, config):
with self.descriptor as f:
f.write(config.encode())
def prepare_dogen(self, repo_files_dir=None):
args = argparse.Namespace(path=self.descriptor.name, output=self.target_dir, without_sources=None,
template=None, scripts_path=None, additional_script=None,
skip_ssl_verification=None, repo_files_dir=repo_files_dir)
self.dogen = Generator(self.log, args, [Repo])
def test_custom_repo_files_should_add_two(self):
open(os.path.join(self.workdir, "fedora.repo"), 'a').close()
open(os.path.join(self.workdir, "test.repo"), 'a').close()
self.write_config("release: '1'\nversion: '1'\ncmd:\n - whoami\nfrom: scratch\nname: someimage\npackages:\n - wget")
self.prepare_dogen(self.workdir)
self.dogen.run()
self.assertIsNotNone(self.dogen.cfg)
self.assertIsNotNone(self.dogen.cfg.get('packages'))
self.assertIsInstance(self.dogen.cfg.get('packages'), list)
self.assertIn("wget", self.dogen.cfg.get('packages'))
dockerfile = open(os.path.join(self.target_dir, "Dockerfile")).read()
sys.stderr.write("\t\t\tDEBUGDEBUG\n{}\n".format(dockerfile))
self.assertTrue(re.match(r'.*yum install[^\n]+wget', dockerfile, re.DOTALL))
self.assertTrue(re.match(r'.*rpm -q +wget', dockerfile, re.DOTALL))
|
<commit_before><commit_msg>Add a test for package generation<commit_after>import argparse
import mock
import os
import tempfile
import unittest
import shutil
import re
import sys
from dogen.plugins.repo import Repo
from dogen.generator import Generator
class TestPackage(unittest.TestCase):
def setUp(self):
self.workdir = tempfile.mkdtemp(prefix='test_repo_plugin')
self.descriptor = tempfile.NamedTemporaryFile(delete=False)
self.target_dir = os.path.join(self.workdir, "target")
self.log = mock.Mock()
def tearDown(self):
shutil.rmtree(self.workdir)
def write_config(self, config):
with self.descriptor as f:
f.write(config.encode())
def prepare_dogen(self, repo_files_dir=None):
args = argparse.Namespace(path=self.descriptor.name, output=self.target_dir, without_sources=None,
template=None, scripts_path=None, additional_script=None,
skip_ssl_verification=None, repo_files_dir=repo_files_dir)
self.dogen = Generator(self.log, args, [Repo])
def test_custom_repo_files_should_add_two(self):
open(os.path.join(self.workdir, "fedora.repo"), 'a').close()
open(os.path.join(self.workdir, "test.repo"), 'a').close()
self.write_config("release: '1'\nversion: '1'\ncmd:\n - whoami\nfrom: scratch\nname: someimage\npackages:\n - wget")
self.prepare_dogen(self.workdir)
self.dogen.run()
self.assertIsNotNone(self.dogen.cfg)
self.assertIsNotNone(self.dogen.cfg.get('packages'))
self.assertIsInstance(self.dogen.cfg.get('packages'), list)
self.assertIn("wget", self.dogen.cfg.get('packages'))
dockerfile = open(os.path.join(self.target_dir, "Dockerfile")).read()
sys.stderr.write("\t\t\tDEBUGDEBUG\n{}\n".format(dockerfile))
self.assertTrue(re.match(r'.*yum install[^\n]+wget', dockerfile, re.DOTALL))
self.assertTrue(re.match(r'.*rpm -q +wget', dockerfile, re.DOTALL))
|
|
2567596cdc1a3dc0a00aee122793a5733f8f9abb
|
import_data.py
|
import_data.py
|
from __future__ import print_function
import os
import re
#http://stackoverflow.com/questions/3964681/find-all-files-in-directory-with-extension-txt-with-python
create_table_file = open('create_table.sql', 'r')
output_file = open('import_data.sql', 'w')
print('drop table districts;', file=output_file)
for line in create_table_file:
print(line, file=output_file)
print('.mode csv', file=output_file)
print('create table if not exists districts (name TEXT, state TEXT, classification TEXT, household_total INTEGER, population_total INTEGER);', file=output_file)
def import_from_file(filename):
print('.import "data/'+filename+'" raw_data', file=output_file)
for filename in os.listdir("data/"):
if filename.endswith('.CSV'):
import_from_file(filename)
state_name = re.sub(r'\.CSV$', '', filename)
state_name = re.sub(r'[()\d]', '', state_name)
state_name = state_name.strip()
print('insert or replace into districts select Name, "'+state_name+'", TRU, "No of households", "Total Population Person" from raw_data where Level=\'DISTRICT\';', file=output_file)
print('drop table raw_data;', file=output_file)
|
Create .sql file to automatically import data into database
|
Create .sql file to automatically import data into database
|
Python
|
bsd-3-clause
|
rkawauchi/IHK,rkawauchi/IHK
|
Create .sql file to automatically import data into database
|
from __future__ import print_function
import os
import re
#http://stackoverflow.com/questions/3964681/find-all-files-in-directory-with-extension-txt-with-python
create_table_file = open('create_table.sql', 'r')
output_file = open('import_data.sql', 'w')
print('drop table districts;', file=output_file)
for line in create_table_file:
print(line, file=output_file)
print('.mode csv', file=output_file)
print('create table if not exists districts (name TEXT, state TEXT, classification TEXT, household_total INTEGER, population_total INTEGER);', file=output_file)
def import_from_file(filename):
print('.import "data/'+filename+'" raw_data', file=output_file)
for filename in os.listdir("data/"):
if filename.endswith('.CSV'):
import_from_file(filename)
state_name = re.sub(r'\.CSV$', '', filename)
state_name = re.sub(r'[()\d]', '', state_name)
state_name = state_name.strip()
print('insert or replace into districts select Name, "'+state_name+'", TRU, "No of households", "Total Population Person" from raw_data where Level=\'DISTRICT\';', file=output_file)
print('drop table raw_data;', file=output_file)
|
<commit_before><commit_msg>Create .sql file to automatically import data into database<commit_after>
|
from __future__ import print_function
import os
import re
#http://stackoverflow.com/questions/3964681/find-all-files-in-directory-with-extension-txt-with-python
create_table_file = open('create_table.sql', 'r')
output_file = open('import_data.sql', 'w')
print('drop table districts;', file=output_file)
for line in create_table_file:
print(line, file=output_file)
print('.mode csv', file=output_file)
print('create table if not exists districts (name TEXT, state TEXT, classification TEXT, household_total INTEGER, population_total INTEGER);', file=output_file)
def import_from_file(filename):
print('.import "data/'+filename+'" raw_data', file=output_file)
for filename in os.listdir("data/"):
if filename.endswith('.CSV'):
import_from_file(filename)
state_name = re.sub(r'\.CSV$', '', filename)
state_name = re.sub(r'[()\d]', '', state_name)
state_name = state_name.strip()
print('insert or replace into districts select Name, "'+state_name+'", TRU, "No of households", "Total Population Person" from raw_data where Level=\'DISTRICT\';', file=output_file)
print('drop table raw_data;', file=output_file)
|
Create .sql file to automatically import data into databasefrom __future__ import print_function
import os
import re
#http://stackoverflow.com/questions/3964681/find-all-files-in-directory-with-extension-txt-with-python
create_table_file = open('create_table.sql', 'r')
output_file = open('import_data.sql', 'w')
print('drop table districts;', file=output_file)
for line in create_table_file:
print(line, file=output_file)
print('.mode csv', file=output_file)
print('create table if not exists districts (name TEXT, state TEXT, classification TEXT, household_total INTEGER, population_total INTEGER);', file=output_file)
def import_from_file(filename):
print('.import "data/'+filename+'" raw_data', file=output_file)
for filename in os.listdir("data/"):
if filename.endswith('.CSV'):
import_from_file(filename)
state_name = re.sub(r'\.CSV$', '', filename)
state_name = re.sub(r'[()\d]', '', state_name)
state_name = state_name.strip()
print('insert or replace into districts select Name, "'+state_name+'", TRU, "No of households", "Total Population Person" from raw_data where Level=\'DISTRICT\';', file=output_file)
print('drop table raw_data;', file=output_file)
|
<commit_before><commit_msg>Create .sql file to automatically import data into database<commit_after>from __future__ import print_function
import os
import re
#http://stackoverflow.com/questions/3964681/find-all-files-in-directory-with-extension-txt-with-python
create_table_file = open('create_table.sql', 'r')
output_file = open('import_data.sql', 'w')
print('drop table districts;', file=output_file)
for line in create_table_file:
print(line, file=output_file)
print('.mode csv', file=output_file)
print('create table if not exists districts (name TEXT, state TEXT, classification TEXT, household_total INTEGER, population_total INTEGER);', file=output_file)
def import_from_file(filename):
print('.import "data/'+filename+'" raw_data', file=output_file)
for filename in os.listdir("data/"):
if filename.endswith('.CSV'):
import_from_file(filename)
state_name = re.sub(r'\.CSV$', '', filename)
state_name = re.sub(r'[()\d]', '', state_name)
state_name = state_name.strip()
print('insert or replace into districts select Name, "'+state_name+'", TRU, "No of households", "Total Population Person" from raw_data where Level=\'DISTRICT\';', file=output_file)
print('drop table raw_data;', file=output_file)
|
|
6c607c2d8c903febe9e787f627a6be41544b629e
|
lintcode/Easy/112_Delete_Duplicates.py
|
lintcode/Easy/112_Delete_Duplicates.py
|
"""
Definition of ListNode
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
"""
class Solution:
"""
@param head: A ListNode
@return: A ListNode
"""
def deleteDuplicates(self, head):
# write your code here
tmp = head
while (tmp and tmp.next):
if (tmp.val != tmp.next.val):
tmp = tmp.next
else:
tmp.next = tmp.next.next
return head
|
Add solution to lintcode question 112
|
Add solution to lintcode question 112
|
Python
|
mit
|
Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode
|
Add solution to lintcode question 112
|
"""
Definition of ListNode
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
"""
class Solution:
"""
@param head: A ListNode
@return: A ListNode
"""
def deleteDuplicates(self, head):
# write your code here
tmp = head
while (tmp and tmp.next):
if (tmp.val != tmp.next.val):
tmp = tmp.next
else:
tmp.next = tmp.next.next
return head
|
<commit_before><commit_msg>Add solution to lintcode question 112<commit_after>
|
"""
Definition of ListNode
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
"""
class Solution:
"""
@param head: A ListNode
@return: A ListNode
"""
def deleteDuplicates(self, head):
# write your code here
tmp = head
while (tmp and tmp.next):
if (tmp.val != tmp.next.val):
tmp = tmp.next
else:
tmp.next = tmp.next.next
return head
|
Add solution to lintcode question 112"""
Definition of ListNode
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
"""
class Solution:
"""
@param head: A ListNode
@return: A ListNode
"""
def deleteDuplicates(self, head):
# write your code here
tmp = head
while (tmp and tmp.next):
if (tmp.val != tmp.next.val):
tmp = tmp.next
else:
tmp.next = tmp.next.next
return head
|
<commit_before><commit_msg>Add solution to lintcode question 112<commit_after>"""
Definition of ListNode
class ListNode(object):
def __init__(self, val, next=None):
self.val = val
self.next = next
"""
class Solution:
"""
@param head: A ListNode
@return: A ListNode
"""
def deleteDuplicates(self, head):
# write your code here
tmp = head
while (tmp and tmp.next):
if (tmp.val != tmp.next.val):
tmp = tmp.next
else:
tmp.next = tmp.next.next
return head
|
|
9a10aa2406def7df654710c5b3244218efad95c8
|
examples/cc/14-ccsd_diis.py
|
examples/cc/14-ccsd_diis.py
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Adjust CCSD DIIS
'''
from pyscf import gto, scf, cc
mol = gto.M(
atom = 'H 0 0 0; F 0 0 1.1',
basis = 'ccpvdz')
mf = scf.RHF(mol).run()
#
# Increase the DIIS space to improve convergence
#
mycc = cc.CCSD(mf)
mycc.diis_space = 10
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
#
# By default, CCSD damps the solution starting from the first iteration.
# In some systems, it'd be better to exclude the CCSD amplitudes of early
# iterations. To start DIIS extrapolation later, you can set diis_start_cycle.
#
mycc.diis_start_cycle = 4
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
|
Add example of CCSD DIIS
|
Add example of CCSD DIIS
|
Python
|
apache-2.0
|
gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf
|
Add example of CCSD DIIS
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Adjust CCSD DIIS
'''
from pyscf import gto, scf, cc
mol = gto.M(
atom = 'H 0 0 0; F 0 0 1.1',
basis = 'ccpvdz')
mf = scf.RHF(mol).run()
#
# Increase the DIIS space to improve convergence
#
mycc = cc.CCSD(mf)
mycc.diis_space = 10
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
#
# By default, CCSD damps the solution starting from the first iteration.
# In some systems, it'd be better to exclude the CCSD amplitudes of early
# iterations. To start DIIS extrapolation later, you can set diis_start_cycle.
#
mycc.diis_start_cycle = 4
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
|
<commit_before><commit_msg>Add example of CCSD DIIS<commit_after>
|
#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Adjust CCSD DIIS
'''
from pyscf import gto, scf, cc
mol = gto.M(
atom = 'H 0 0 0; F 0 0 1.1',
basis = 'ccpvdz')
mf = scf.RHF(mol).run()
#
# Increase the DIIS space to improve convergence
#
mycc = cc.CCSD(mf)
mycc.diis_space = 10
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
#
# By default, CCSD damps the solution starting from the first iteration.
# In some systems, it'd be better to exclude the CCSD amplitudes of early
# iterations. To start DIIS extrapolation later, you can set diis_start_cycle.
#
mycc.diis_start_cycle = 4
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
|
Add example of CCSD DIIS#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Adjust CCSD DIIS
'''
from pyscf import gto, scf, cc
mol = gto.M(
atom = 'H 0 0 0; F 0 0 1.1',
basis = 'ccpvdz')
mf = scf.RHF(mol).run()
#
# Increase the DIIS space to improve convergence
#
mycc = cc.CCSD(mf)
mycc.diis_space = 10
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
#
# By default, CCSD damps the solution starting from the first iteration.
# In some systems, it'd be better to exclude the CCSD amplitudes of early
# iterations. To start DIIS extrapolation later, you can set diis_start_cycle.
#
mycc.diis_start_cycle = 4
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
|
<commit_before><commit_msg>Add example of CCSD DIIS<commit_after>#!/usr/bin/env python
#
# Author: Qiming Sun <osirpt.sun@gmail.com>
#
'''
Adjust CCSD DIIS
'''
from pyscf import gto, scf, cc
mol = gto.M(
atom = 'H 0 0 0; F 0 0 1.1',
basis = 'ccpvdz')
mf = scf.RHF(mol).run()
#
# Increase the DIIS space to improve convergence
#
mycc = cc.CCSD(mf)
mycc.diis_space = 10
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
#
# By default, CCSD damps the solution starting from the first iteration.
# In some systems, it'd be better to exclude the CCSD amplitudes of early
# iterations. To start DIIS extrapolation later, you can set diis_start_cycle.
#
mycc.diis_start_cycle = 4
mycc.kernel()
print('CCSD correlation energy', mycc.e_corr)
|
|
05427969fea7f5d2d528d368f4455dfeabb71b9c
|
test_rulefit.py
|
test_rulefit.py
|
from rulefit import RuleCondition, Rule, RuleEnsemble, RuleFit
import numpy as np
rule_condition_smaller = RuleCondition(1, 5, "<=", 0.4)
rule_condition_greater = RuleCondition(0, 1, ">", 0.1)
X = np.array([[1,2,3], [4,5,6], [7,8,9]])
## Testing RuleCondition
def test_rule_condition_hashing_equal1():
assert (RuleCondition(1, 5, "<=", 0.4) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_equal2():
assert (RuleCondition(1, 5, "<=", 0.5) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different1():
assert (RuleCondition(1, 4, "<=", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different2():
assert (RuleCondition(1, 5, ">", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different3():
assert (RuleCondition(2, 5, ">", 0.4) != RuleCondition(1, 5, ">", 0.4))
def test_rule_condition_smaller():
np.testing.assert_array_equal(rule_condition_smaller.transform(X),
np.array([1,1,0]))
def test_rule_condition_greater():
np.testing.assert_array_equal(rule_condition_greater.transform(X),
np.array([0,1,1]))
## Testing rule
rule = Rule([rule_condition_smaller, rule_condition_greater])
def test_rule_transform():
np.testing.assert_array_equal(rule.transform(X),
np.array([0,1,0]))
def test_rule_equality():
rule2 = Rule([rule_condition_greater, rule_condition_smaller])
assert rule == rule2
## Test rule extraction function
## TODO
## RuleEnsemble
## - Construct ensemble with 2 short trees and test results
## - Test filtering rules so that only rules using the "<=" operator remain
## - Test filter short rules
## - Test transform function
|
Add tests for RuleCondition and Rule
|
Add tests for RuleCondition and Rule
|
Python
|
mit
|
christophM/rulefit
|
Add tests for RuleCondition and Rule
|
from rulefit import RuleCondition, Rule, RuleEnsemble, RuleFit
import numpy as np
rule_condition_smaller = RuleCondition(1, 5, "<=", 0.4)
rule_condition_greater = RuleCondition(0, 1, ">", 0.1)
X = np.array([[1,2,3], [4,5,6], [7,8,9]])
## Testing RuleCondition
def test_rule_condition_hashing_equal1():
assert (RuleCondition(1, 5, "<=", 0.4) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_equal2():
assert (RuleCondition(1, 5, "<=", 0.5) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different1():
assert (RuleCondition(1, 4, "<=", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different2():
assert (RuleCondition(1, 5, ">", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different3():
assert (RuleCondition(2, 5, ">", 0.4) != RuleCondition(1, 5, ">", 0.4))
def test_rule_condition_smaller():
np.testing.assert_array_equal(rule_condition_smaller.transform(X),
np.array([1,1,0]))
def test_rule_condition_greater():
np.testing.assert_array_equal(rule_condition_greater.transform(X),
np.array([0,1,1]))
## Testing rule
rule = Rule([rule_condition_smaller, rule_condition_greater])
def test_rule_transform():
np.testing.assert_array_equal(rule.transform(X),
np.array([0,1,0]))
def test_rule_equality():
rule2 = Rule([rule_condition_greater, rule_condition_smaller])
assert rule == rule2
## Test rule extraction function
## TODO
## RuleEnsemble
## - Construct ensemble with 2 short trees and test results
## - Test filtering rules so that only rules using the "<=" operator remain
## - Test filter short rules
## - Test transform function
|
<commit_before><commit_msg>Add tests for RuleCondition and Rule<commit_after>
|
from rulefit import RuleCondition, Rule, RuleEnsemble, RuleFit
import numpy as np
rule_condition_smaller = RuleCondition(1, 5, "<=", 0.4)
rule_condition_greater = RuleCondition(0, 1, ">", 0.1)
X = np.array([[1,2,3], [4,5,6], [7,8,9]])
## Testing RuleCondition
def test_rule_condition_hashing_equal1():
assert (RuleCondition(1, 5, "<=", 0.4) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_equal2():
assert (RuleCondition(1, 5, "<=", 0.5) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different1():
assert (RuleCondition(1, 4, "<=", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different2():
assert (RuleCondition(1, 5, ">", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different3():
assert (RuleCondition(2, 5, ">", 0.4) != RuleCondition(1, 5, ">", 0.4))
def test_rule_condition_smaller():
np.testing.assert_array_equal(rule_condition_smaller.transform(X),
np.array([1,1,0]))
def test_rule_condition_greater():
np.testing.assert_array_equal(rule_condition_greater.transform(X),
np.array([0,1,1]))
## Testing rule
rule = Rule([rule_condition_smaller, rule_condition_greater])
def test_rule_transform():
np.testing.assert_array_equal(rule.transform(X),
np.array([0,1,0]))
def test_rule_equality():
rule2 = Rule([rule_condition_greater, rule_condition_smaller])
assert rule == rule2
## Test rule extraction function
## TODO
## RuleEnsemble
## - Construct ensemble with 2 short trees and test results
## - Test filtering rules so that only rules using the "<=" operator remain
## - Test filter short rules
## - Test transform function
|
Add tests for RuleCondition and Rulefrom rulefit import RuleCondition, Rule, RuleEnsemble, RuleFit
import numpy as np
rule_condition_smaller = RuleCondition(1, 5, "<=", 0.4)
rule_condition_greater = RuleCondition(0, 1, ">", 0.1)
X = np.array([[1,2,3], [4,5,6], [7,8,9]])
## Testing RuleCondition
def test_rule_condition_hashing_equal1():
assert (RuleCondition(1, 5, "<=", 0.4) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_equal2():
assert (RuleCondition(1, 5, "<=", 0.5) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different1():
assert (RuleCondition(1, 4, "<=", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different2():
assert (RuleCondition(1, 5, ">", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different3():
assert (RuleCondition(2, 5, ">", 0.4) != RuleCondition(1, 5, ">", 0.4))
def test_rule_condition_smaller():
np.testing.assert_array_equal(rule_condition_smaller.transform(X),
np.array([1,1,0]))
def test_rule_condition_greater():
np.testing.assert_array_equal(rule_condition_greater.transform(X),
np.array([0,1,1]))
## Testing rule
rule = Rule([rule_condition_smaller, rule_condition_greater])
def test_rule_transform():
np.testing.assert_array_equal(rule.transform(X),
np.array([0,1,0]))
def test_rule_equality():
rule2 = Rule([rule_condition_greater, rule_condition_smaller])
assert rule == rule2
## Test rule extraction function
## TODO
## RuleEnsemble
## - Construct ensemble with 2 short trees and test results
## - Test filtering rules so that only rules using the "<=" operator remain
## - Test filter short rules
## - Test transform function
|
<commit_before><commit_msg>Add tests for RuleCondition and Rule<commit_after>from rulefit import RuleCondition, Rule, RuleEnsemble, RuleFit
import numpy as np
rule_condition_smaller = RuleCondition(1, 5, "<=", 0.4)
rule_condition_greater = RuleCondition(0, 1, ">", 0.1)
X = np.array([[1,2,3], [4,5,6], [7,8,9]])
## Testing RuleCondition
def test_rule_condition_hashing_equal1():
assert (RuleCondition(1, 5, "<=", 0.4) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_equal2():
assert (RuleCondition(1, 5, "<=", 0.5) == RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different1():
assert (RuleCondition(1, 4, "<=", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different2():
assert (RuleCondition(1, 5, ">", 0.4) != RuleCondition(1, 5, "<=", 0.4))
def test_rule_condition_hashing_different3():
assert (RuleCondition(2, 5, ">", 0.4) != RuleCondition(1, 5, ">", 0.4))
def test_rule_condition_smaller():
np.testing.assert_array_equal(rule_condition_smaller.transform(X),
np.array([1,1,0]))
def test_rule_condition_greater():
np.testing.assert_array_equal(rule_condition_greater.transform(X),
np.array([0,1,1]))
## Testing rule
rule = Rule([rule_condition_smaller, rule_condition_greater])
def test_rule_transform():
np.testing.assert_array_equal(rule.transform(X),
np.array([0,1,0]))
def test_rule_equality():
rule2 = Rule([rule_condition_greater, rule_condition_smaller])
assert rule == rule2
## Test rule extraction function
## TODO
## RuleEnsemble
## - Construct ensemble with 2 short trees and test results
## - Test filtering rules so that only rules using the "<=" operator remain
## - Test filter short rules
## - Test transform function
|
|
8a120852938247f53f0eb793932ef7b13db9f577
|
tools/primes.py
|
tools/primes.py
|
primes = [2, 3, 5, 7]
safe = True
multiplier = 1
for p in primes:
multiplier *= p
offsets = []
for x in range(3, multiplier + 3, 2):
prime = True
for p in primes:
if not x % p or (safe and not ((x - 1) / 2) % p):
prime = False
break
if prime:
offsets.append(x)
print(offsets)
print(len(offsets))
print(multiplier)
|
Add python script to generate the bits needed for the prime generator.
|
Add python script to generate the bits needed for the prime generator.
|
Python
|
apache-2.0
|
openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl,openssl/openssl
|
Add python script to generate the bits needed for the prime generator.
|
primes = [2, 3, 5, 7]
safe = True
multiplier = 1
for p in primes:
multiplier *= p
offsets = []
for x in range(3, multiplier + 3, 2):
prime = True
for p in primes:
if not x % p or (safe and not ((x - 1) / 2) % p):
prime = False
break
if prime:
offsets.append(x)
print(offsets)
print(len(offsets))
print(multiplier)
|
<commit_before><commit_msg>Add python script to generate the bits needed for the prime generator.<commit_after>
|
primes = [2, 3, 5, 7]
safe = True
multiplier = 1
for p in primes:
multiplier *= p
offsets = []
for x in range(3, multiplier + 3, 2):
prime = True
for p in primes:
if not x % p or (safe and not ((x - 1) / 2) % p):
prime = False
break
if prime:
offsets.append(x)
print(offsets)
print(len(offsets))
print(multiplier)
|
Add python script to generate the bits needed for the prime generator.primes = [2, 3, 5, 7]
safe = True
multiplier = 1
for p in primes:
multiplier *= p
offsets = []
for x in range(3, multiplier + 3, 2):
prime = True
for p in primes:
if not x % p or (safe and not ((x - 1) / 2) % p):
prime = False
break
if prime:
offsets.append(x)
print(offsets)
print(len(offsets))
print(multiplier)
|
<commit_before><commit_msg>Add python script to generate the bits needed for the prime generator.<commit_after>primes = [2, 3, 5, 7]
safe = True
multiplier = 1
for p in primes:
multiplier *= p
offsets = []
for x in range(3, multiplier + 3, 2):
prime = True
for p in primes:
if not x % p or (safe and not ((x - 1) / 2) % p):
prime = False
break
if prime:
offsets.append(x)
print(offsets)
print(len(offsets))
print(multiplier)
|
|
2594a273d4248289ac839b0c436ca3c1d115008d
|
tests/test_exceptions.py
|
tests/test_exceptions.py
|
from scrapyd_api.exceptions import ScrapydError
def test_scrapyd_error():
err = ScrapydError()
assert repr(err) == 'ScrapydError("Scrapyd Error")'
err_with_detail = ScrapydError(detail='Something went wrong')
assert repr(err_with_detail) == 'ScrapydError("Something went wrong")'
|
Add a test to check the ScrapydError's repr().
|
Add a test to check the ScrapydError's repr().
Takes us to 100% coverage.
|
Python
|
bsd-2-clause
|
djm/python-scrapyd-api
|
Add a test to check the ScrapydError's repr().
Takes us to 100% coverage.
|
from scrapyd_api.exceptions import ScrapydError
def test_scrapyd_error():
err = ScrapydError()
assert repr(err) == 'ScrapydError("Scrapyd Error")'
err_with_detail = ScrapydError(detail='Something went wrong')
assert repr(err_with_detail) == 'ScrapydError("Something went wrong")'
|
<commit_before><commit_msg>Add a test to check the ScrapydError's repr().
Takes us to 100% coverage.<commit_after>
|
from scrapyd_api.exceptions import ScrapydError
def test_scrapyd_error():
err = ScrapydError()
assert repr(err) == 'ScrapydError("Scrapyd Error")'
err_with_detail = ScrapydError(detail='Something went wrong')
assert repr(err_with_detail) == 'ScrapydError("Something went wrong")'
|
Add a test to check the ScrapydError's repr().
Takes us to 100% coverage.from scrapyd_api.exceptions import ScrapydError
def test_scrapyd_error():
err = ScrapydError()
assert repr(err) == 'ScrapydError("Scrapyd Error")'
err_with_detail = ScrapydError(detail='Something went wrong')
assert repr(err_with_detail) == 'ScrapydError("Something went wrong")'
|
<commit_before><commit_msg>Add a test to check the ScrapydError's repr().
Takes us to 100% coverage.<commit_after>from scrapyd_api.exceptions import ScrapydError
def test_scrapyd_error():
err = ScrapydError()
assert repr(err) == 'ScrapydError("Scrapyd Error")'
err_with_detail = ScrapydError(detail='Something went wrong')
assert repr(err_with_detail) == 'ScrapydError("Something went wrong")'
|
|
e12b8b81b012bf2a459e2df2faa64057164cf968
|
py/is-subsequence.py
|
py/is-subsequence.py
|
class Solution(object):
def isSubsequence(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
x = 0
for i, c in enumerate(t):
if x == len(s):
return True
if c == s[x]:
x += 1
return x == len(s)
|
Add py solution for 392. Is Subsequence
|
Add py solution for 392. Is Subsequence
392. Is Subsequence: https://leetcode.com/problems/is-subsequence/
Approach:
Observe the first item remaining in each step. The value 1 << step is added
when either the remaining count is odd or it's a left-to-right step. Hence
n | 0x55555.. is the key.
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 392. Is Subsequence
392. Is Subsequence: https://leetcode.com/problems/is-subsequence/
Approach:
Observe the first item remaining in each step. The value 1 << step is added
when either the remaining count is odd or it's a left-to-right step. Hence
n | 0x55555.. is the key.
|
class Solution(object):
def isSubsequence(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
x = 0
for i, c in enumerate(t):
if x == len(s):
return True
if c == s[x]:
x += 1
return x == len(s)
|
<commit_before><commit_msg>Add py solution for 392. Is Subsequence
392. Is Subsequence: https://leetcode.com/problems/is-subsequence/
Approach:
Observe the first item remaining in each step. The value 1 << step is added
when either the remaining count is odd or it's a left-to-right step. Hence
n | 0x55555.. is the key.<commit_after>
|
class Solution(object):
def isSubsequence(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
x = 0
for i, c in enumerate(t):
if x == len(s):
return True
if c == s[x]:
x += 1
return x == len(s)
|
Add py solution for 392. Is Subsequence
392. Is Subsequence: https://leetcode.com/problems/is-subsequence/
Approach:
Observe the first item remaining in each step. The value 1 << step is added
when either the remaining count is odd or it's a left-to-right step. Hence
n | 0x55555.. is the key.class Solution(object):
def isSubsequence(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
x = 0
for i, c in enumerate(t):
if x == len(s):
return True
if c == s[x]:
x += 1
return x == len(s)
|
<commit_before><commit_msg>Add py solution for 392. Is Subsequence
392. Is Subsequence: https://leetcode.com/problems/is-subsequence/
Approach:
Observe the first item remaining in each step. The value 1 << step is added
when either the remaining count is odd or it's a left-to-right step. Hence
n | 0x55555.. is the key.<commit_after>class Solution(object):
def isSubsequence(self, s, t):
"""
:type s: str
:type t: str
:rtype: bool
"""
x = 0
for i, c in enumerate(t):
if x == len(s):
return True
if c == s[x]:
x += 1
return x == len(s)
|
|
a8200dbb889281efe9d39cca446bcd62fcb26a67
|
py/construct-string-from-binary-tree.py
|
py/construct-string-from-binary-tree.py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def tree2str(self, t):
"""
:type t: TreeNode
:rtype: str
"""
if not t:
return ""
ret = [str(t.val)]
if t.left or t.right:
ret.append("(")
ret.append(self.tree2str(t.left))
ret.append(")")
if t.right:
ret.append("(")
ret.append(self.tree2str(t.right))
ret.append(")")
return ''.join(ret)
|
Add py solution for 606. Construct String from Binary Tree
|
Add py solution for 606. Construct String from Binary Tree
606. Construct String from Binary Tree: https://leetcode.com/problems/construct-string-from-binary-tree/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 606. Construct String from Binary Tree
606. Construct String from Binary Tree: https://leetcode.com/problems/construct-string-from-binary-tree/
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def tree2str(self, t):
"""
:type t: TreeNode
:rtype: str
"""
if not t:
return ""
ret = [str(t.val)]
if t.left or t.right:
ret.append("(")
ret.append(self.tree2str(t.left))
ret.append(")")
if t.right:
ret.append("(")
ret.append(self.tree2str(t.right))
ret.append(")")
return ''.join(ret)
|
<commit_before><commit_msg>Add py solution for 606. Construct String from Binary Tree
606. Construct String from Binary Tree: https://leetcode.com/problems/construct-string-from-binary-tree/<commit_after>
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def tree2str(self, t):
"""
:type t: TreeNode
:rtype: str
"""
if not t:
return ""
ret = [str(t.val)]
if t.left or t.right:
ret.append("(")
ret.append(self.tree2str(t.left))
ret.append(")")
if t.right:
ret.append("(")
ret.append(self.tree2str(t.right))
ret.append(")")
return ''.join(ret)
|
Add py solution for 606. Construct String from Binary Tree
606. Construct String from Binary Tree: https://leetcode.com/problems/construct-string-from-binary-tree/# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def tree2str(self, t):
"""
:type t: TreeNode
:rtype: str
"""
if not t:
return ""
ret = [str(t.val)]
if t.left or t.right:
ret.append("(")
ret.append(self.tree2str(t.left))
ret.append(")")
if t.right:
ret.append("(")
ret.append(self.tree2str(t.right))
ret.append(")")
return ''.join(ret)
|
<commit_before><commit_msg>Add py solution for 606. Construct String from Binary Tree
606. Construct String from Binary Tree: https://leetcode.com/problems/construct-string-from-binary-tree/<commit_after># Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def tree2str(self, t):
"""
:type t: TreeNode
:rtype: str
"""
if not t:
return ""
ret = [str(t.val)]
if t.left or t.right:
ret.append("(")
ret.append(self.tree2str(t.left))
ret.append(")")
if t.right:
ret.append("(")
ret.append(self.tree2str(t.right))
ret.append(")")
return ''.join(ret)
|
|
234f55355e26f9a549cca75051ad369f557184ae
|
migrations/versions/5_add_oai_table.py
|
migrations/versions/5_add_oai_table.py
|
""" Add table for OAI repositories
Revision ID: 71874271208e
Revises: a80b4f777c12
Create Date: 2017-01-15 21:47:58.946488
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '71874271208e'
down_revision = 'a80b4f777c12'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'oai_repository',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('endpoint', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('endpoint'))
def downgrade():
op.drop_table('oai_repository')
|
Add table for OAI repositories
|
Add table for OAI repositories
|
Python
|
agpl-3.0
|
jbaiter/demetsiiify,jbaiter/demetsiiify,jbaiter/demetsiiify
|
Add table for OAI repositories
|
""" Add table for OAI repositories
Revision ID: 71874271208e
Revises: a80b4f777c12
Create Date: 2017-01-15 21:47:58.946488
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '71874271208e'
down_revision = 'a80b4f777c12'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'oai_repository',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('endpoint', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('endpoint'))
def downgrade():
op.drop_table('oai_repository')
|
<commit_before><commit_msg>Add table for OAI repositories<commit_after>
|
""" Add table for OAI repositories
Revision ID: 71874271208e
Revises: a80b4f777c12
Create Date: 2017-01-15 21:47:58.946488
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '71874271208e'
down_revision = 'a80b4f777c12'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'oai_repository',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('endpoint', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('endpoint'))
def downgrade():
op.drop_table('oai_repository')
|
Add table for OAI repositories""" Add table for OAI repositories
Revision ID: 71874271208e
Revises: a80b4f777c12
Create Date: 2017-01-15 21:47:58.946488
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '71874271208e'
down_revision = 'a80b4f777c12'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'oai_repository',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('endpoint', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('endpoint'))
def downgrade():
op.drop_table('oai_repository')
|
<commit_before><commit_msg>Add table for OAI repositories<commit_after>""" Add table for OAI repositories
Revision ID: 71874271208e
Revises: a80b4f777c12
Create Date: 2017-01-15 21:47:58.946488
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '71874271208e'
down_revision = 'a80b4f777c12'
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
'oai_repository',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('endpoint', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('endpoint'))
def downgrade():
op.drop_table('oai_repository')
|
|
1a776b7fd58f7936cf37ee5e0e4e1b2a4bf5ff3e
|
src/tempel/utils.py
|
src/tempel/utils.py
|
from django.conf import settings
languages = dict([(item['name'], item) for item in settings.TEMPEL_LANGUAGES])
def get_languages():
return sorted([(item['name'], item['label']) for item in languages.values()])
def get_language(name):
return languages[name]['label']
def get_mimetype(name):
return languages[name]['mime']
def get_extension(name):
return languages[name]['ext']
|
Add utility functions to query the languages
|
Add utility functions to query the languages
|
Python
|
agpl-3.0
|
fajran/tempel
|
Add utility functions to query the languages
|
from django.conf import settings
languages = dict([(item['name'], item) for item in settings.TEMPEL_LANGUAGES])
def get_languages():
return sorted([(item['name'], item['label']) for item in languages.values()])
def get_language(name):
return languages[name]['label']
def get_mimetype(name):
return languages[name]['mime']
def get_extension(name):
return languages[name]['ext']
|
<commit_before><commit_msg>Add utility functions to query the languages<commit_after>
|
from django.conf import settings
languages = dict([(item['name'], item) for item in settings.TEMPEL_LANGUAGES])
def get_languages():
return sorted([(item['name'], item['label']) for item in languages.values()])
def get_language(name):
return languages[name]['label']
def get_mimetype(name):
return languages[name]['mime']
def get_extension(name):
return languages[name]['ext']
|
Add utility functions to query the languagesfrom django.conf import settings
languages = dict([(item['name'], item) for item in settings.TEMPEL_LANGUAGES])
def get_languages():
return sorted([(item['name'], item['label']) for item in languages.values()])
def get_language(name):
return languages[name]['label']
def get_mimetype(name):
return languages[name]['mime']
def get_extension(name):
return languages[name]['ext']
|
<commit_before><commit_msg>Add utility functions to query the languages<commit_after>from django.conf import settings
languages = dict([(item['name'], item) for item in settings.TEMPEL_LANGUAGES])
def get_languages():
return sorted([(item['name'], item['label']) for item in languages.values()])
def get_language(name):
return languages[name]['label']
def get_mimetype(name):
return languages[name]['mime']
def get_extension(name):
return languages[name]['ext']
|
|
d88fde11ed648da8af7ee0bf81a635f72d26fe9c
|
tests/test_search.py
|
tests/test_search.py
|
from . import TestCase
from memopol.search.templatetags.search_tags import simple_search_shortcut
class TestSearchTemplateTags(TestCase):
def test_simple_search_shortcut(self):
url = simple_search_shortcut('country:FR or country:BR')
self.assertEqual(url, "/search/?q=country%3AFR%20or%20country%3ABR")
|
Add a basic test for simple_search_shortcut templatetag
|
[enh] Add a basic test for simple_search_shortcut templatetag
|
Python
|
agpl-3.0
|
yohanboniface/memopol-core,yohanboniface/memopol-core,yohanboniface/memopol-core
|
[enh] Add a basic test for simple_search_shortcut templatetag
|
from . import TestCase
from memopol.search.templatetags.search_tags import simple_search_shortcut
class TestSearchTemplateTags(TestCase):
def test_simple_search_shortcut(self):
url = simple_search_shortcut('country:FR or country:BR')
self.assertEqual(url, "/search/?q=country%3AFR%20or%20country%3ABR")
|
<commit_before><commit_msg>[enh] Add a basic test for simple_search_shortcut templatetag<commit_after>
|
from . import TestCase
from memopol.search.templatetags.search_tags import simple_search_shortcut
class TestSearchTemplateTags(TestCase):
def test_simple_search_shortcut(self):
url = simple_search_shortcut('country:FR or country:BR')
self.assertEqual(url, "/search/?q=country%3AFR%20or%20country%3ABR")
|
[enh] Add a basic test for simple_search_shortcut templatetagfrom . import TestCase
from memopol.search.templatetags.search_tags import simple_search_shortcut
class TestSearchTemplateTags(TestCase):
def test_simple_search_shortcut(self):
url = simple_search_shortcut('country:FR or country:BR')
self.assertEqual(url, "/search/?q=country%3AFR%20or%20country%3ABR")
|
<commit_before><commit_msg>[enh] Add a basic test for simple_search_shortcut templatetag<commit_after>from . import TestCase
from memopol.search.templatetags.search_tags import simple_search_shortcut
class TestSearchTemplateTags(TestCase):
def test_simple_search_shortcut(self):
url = simple_search_shortcut('country:FR or country:BR')
self.assertEqual(url, "/search/?q=country%3AFR%20or%20country%3ABR")
|
|
e3248ba4bca04b434414570dc438547d8770adc9
|
tools/ocd_restore.py
|
tools/ocd_restore.py
|
#!/usr/bin/env python
from pupa.utils import JSONEncoderPlus
from contextlib import contextmanager
from pymongo import Connection
import argparse
import json
import os
parser = argparse.ArgumentParser(description='Re-convert a jurisdiction.')
parser.add_argument('--server', type=str, help='Mongo Server',
default="localhost")
parser.add_argument('--database', type=str, help='Mongo Database',
default="opencivicdata")
parser.add_argument('--port', type=int, help='Mongo Server Port',
default=27017)
parser.add_argument('--output', type=str, help='Output Directory',
default="dump")
parser.add_argument('root', type=str, help='root', default='dump')
args = parser.parse_args()
connection = Connection(args.server, args.port)
db = getattr(connection, args.database)
jurisdiction = args.jurisdiction
@contextmanager
def cd(path):
pop = os.getcwd()
os.chdir(path)
try:
yield path
finally:
os.chdir(pop)
with cd(args.root):
print os.getcwd()
|
Add basics for a restore script
|
Add basics for a restore script
(Yes, sadly, still debugging, need the prod db)
|
Python
|
bsd-3-clause
|
rshorey/pupa,mileswwatkins/pupa,datamade/pupa,rshorey/pupa,opencivicdata/pupa,influence-usa/pupa,mileswwatkins/pupa,influence-usa/pupa,opencivicdata/pupa,datamade/pupa
|
Add basics for a restore script
(Yes, sadly, still debugging, need the prod db)
|
#!/usr/bin/env python
from pupa.utils import JSONEncoderPlus
from contextlib import contextmanager
from pymongo import Connection
import argparse
import json
import os
parser = argparse.ArgumentParser(description='Re-convert a jurisdiction.')
parser.add_argument('--server', type=str, help='Mongo Server',
default="localhost")
parser.add_argument('--database', type=str, help='Mongo Database',
default="opencivicdata")
parser.add_argument('--port', type=int, help='Mongo Server Port',
default=27017)
parser.add_argument('--output', type=str, help='Output Directory',
default="dump")
parser.add_argument('root', type=str, help='root', default='dump')
args = parser.parse_args()
connection = Connection(args.server, args.port)
db = getattr(connection, args.database)
jurisdiction = args.jurisdiction
@contextmanager
def cd(path):
pop = os.getcwd()
os.chdir(path)
try:
yield path
finally:
os.chdir(pop)
with cd(args.root):
print os.getcwd()
|
<commit_before><commit_msg>Add basics for a restore script
(Yes, sadly, still debugging, need the prod db)<commit_after>
|
#!/usr/bin/env python
from pupa.utils import JSONEncoderPlus
from contextlib import contextmanager
from pymongo import Connection
import argparse
import json
import os
parser = argparse.ArgumentParser(description='Re-convert a jurisdiction.')
parser.add_argument('--server', type=str, help='Mongo Server',
default="localhost")
parser.add_argument('--database', type=str, help='Mongo Database',
default="opencivicdata")
parser.add_argument('--port', type=int, help='Mongo Server Port',
default=27017)
parser.add_argument('--output', type=str, help='Output Directory',
default="dump")
parser.add_argument('root', type=str, help='root', default='dump')
args = parser.parse_args()
connection = Connection(args.server, args.port)
db = getattr(connection, args.database)
jurisdiction = args.jurisdiction
@contextmanager
def cd(path):
pop = os.getcwd()
os.chdir(path)
try:
yield path
finally:
os.chdir(pop)
with cd(args.root):
print os.getcwd()
|
Add basics for a restore script
(Yes, sadly, still debugging, need the prod db)#!/usr/bin/env python
from pupa.utils import JSONEncoderPlus
from contextlib import contextmanager
from pymongo import Connection
import argparse
import json
import os
parser = argparse.ArgumentParser(description='Re-convert a jurisdiction.')
parser.add_argument('--server', type=str, help='Mongo Server',
default="localhost")
parser.add_argument('--database', type=str, help='Mongo Database',
default="opencivicdata")
parser.add_argument('--port', type=int, help='Mongo Server Port',
default=27017)
parser.add_argument('--output', type=str, help='Output Directory',
default="dump")
parser.add_argument('root', type=str, help='root', default='dump')
args = parser.parse_args()
connection = Connection(args.server, args.port)
db = getattr(connection, args.database)
jurisdiction = args.jurisdiction
@contextmanager
def cd(path):
pop = os.getcwd()
os.chdir(path)
try:
yield path
finally:
os.chdir(pop)
with cd(args.root):
print os.getcwd()
|
<commit_before><commit_msg>Add basics for a restore script
(Yes, sadly, still debugging, need the prod db)<commit_after>#!/usr/bin/env python
from pupa.utils import JSONEncoderPlus
from contextlib import contextmanager
from pymongo import Connection
import argparse
import json
import os
parser = argparse.ArgumentParser(description='Re-convert a jurisdiction.')
parser.add_argument('--server', type=str, help='Mongo Server',
default="localhost")
parser.add_argument('--database', type=str, help='Mongo Database',
default="opencivicdata")
parser.add_argument('--port', type=int, help='Mongo Server Port',
default=27017)
parser.add_argument('--output', type=str, help='Output Directory',
default="dump")
parser.add_argument('root', type=str, help='root', default='dump')
args = parser.parse_args()
connection = Connection(args.server, args.port)
db = getattr(connection, args.database)
jurisdiction = args.jurisdiction
@contextmanager
def cd(path):
pop = os.getcwd()
os.chdir(path)
try:
yield path
finally:
os.chdir(pop)
with cd(args.root):
print os.getcwd()
|
|
1ed54b7b799430f9aac67759b1e3d5bed2b59d8b
|
tests/test_context.py
|
tests/test_context.py
|
from dmaws.context import Context
class TestContext(object):
def test_empty_context(self):
ctx = Context()
assert ctx.stage is None
assert ctx.environment is None
def test_add_apps(self):
ctx = Context()
ctx.add_apps(['api', 'aws'])
assert ctx.apps == ['api', 'aws']
def test_add_apps_string(self):
ctx = Context()
ctx.add_apps('api')
assert ctx.apps == ['api']
def test_add_apps_dash_to_underscore(self):
ctx = Context()
ctx.add_apps(['api', 'search-api'])
assert ctx.apps == ['api', 'search_api']
def test_add_variables(self):
ctx = Context()
ctx.add_variables({'a': 1})
ctx.add_variables({'b': 2})
ctx.add_variables({'a': 2})
assert ctx.variables == {'a': 2, 'b': 2}
def test_add_dotted_variable(self):
ctx = Context()
ctx.add_dotted_variable('a.b.c', 1)
assert ctx.variables == {'a': {'b': {'c': 1}}}
|
Add some tests for Context
|
Add some tests for Context
|
Python
|
mit
|
alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws,alphagov/digitalmarketplace-aws
|
Add some tests for Context
|
from dmaws.context import Context
class TestContext(object):
def test_empty_context(self):
ctx = Context()
assert ctx.stage is None
assert ctx.environment is None
def test_add_apps(self):
ctx = Context()
ctx.add_apps(['api', 'aws'])
assert ctx.apps == ['api', 'aws']
def test_add_apps_string(self):
ctx = Context()
ctx.add_apps('api')
assert ctx.apps == ['api']
def test_add_apps_dash_to_underscore(self):
ctx = Context()
ctx.add_apps(['api', 'search-api'])
assert ctx.apps == ['api', 'search_api']
def test_add_variables(self):
ctx = Context()
ctx.add_variables({'a': 1})
ctx.add_variables({'b': 2})
ctx.add_variables({'a': 2})
assert ctx.variables == {'a': 2, 'b': 2}
def test_add_dotted_variable(self):
ctx = Context()
ctx.add_dotted_variable('a.b.c', 1)
assert ctx.variables == {'a': {'b': {'c': 1}}}
|
<commit_before><commit_msg>Add some tests for Context<commit_after>
|
from dmaws.context import Context
class TestContext(object):
def test_empty_context(self):
ctx = Context()
assert ctx.stage is None
assert ctx.environment is None
def test_add_apps(self):
ctx = Context()
ctx.add_apps(['api', 'aws'])
assert ctx.apps == ['api', 'aws']
def test_add_apps_string(self):
ctx = Context()
ctx.add_apps('api')
assert ctx.apps == ['api']
def test_add_apps_dash_to_underscore(self):
ctx = Context()
ctx.add_apps(['api', 'search-api'])
assert ctx.apps == ['api', 'search_api']
def test_add_variables(self):
ctx = Context()
ctx.add_variables({'a': 1})
ctx.add_variables({'b': 2})
ctx.add_variables({'a': 2})
assert ctx.variables == {'a': 2, 'b': 2}
def test_add_dotted_variable(self):
ctx = Context()
ctx.add_dotted_variable('a.b.c', 1)
assert ctx.variables == {'a': {'b': {'c': 1}}}
|
Add some tests for Context
from dmaws.context import Context
class TestContext(object):
def test_empty_context(self):
ctx = Context()
assert ctx.stage is None
assert ctx.environment is None
def test_add_apps(self):
ctx = Context()
ctx.add_apps(['api', 'aws'])
assert ctx.apps == ['api', 'aws']
def test_add_apps_string(self):
ctx = Context()
ctx.add_apps('api')
assert ctx.apps == ['api']
def test_add_apps_dash_to_underscore(self):
ctx = Context()
ctx.add_apps(['api', 'search-api'])
assert ctx.apps == ['api', 'search_api']
def test_add_variables(self):
ctx = Context()
ctx.add_variables({'a': 1})
ctx.add_variables({'b': 2})
ctx.add_variables({'a': 2})
assert ctx.variables == {'a': 2, 'b': 2}
def test_add_dotted_variable(self):
ctx = Context()
ctx.add_dotted_variable('a.b.c', 1)
assert ctx.variables == {'a': {'b': {'c': 1}}}
|
<commit_before><commit_msg>Add some tests for Context<commit_after>from dmaws.context import Context
class TestContext(object):
def test_empty_context(self):
ctx = Context()
assert ctx.stage is None
assert ctx.environment is None
def test_add_apps(self):
ctx = Context()
ctx.add_apps(['api', 'aws'])
assert ctx.apps == ['api', 'aws']
def test_add_apps_string(self):
ctx = Context()
ctx.add_apps('api')
assert ctx.apps == ['api']
def test_add_apps_dash_to_underscore(self):
ctx = Context()
ctx.add_apps(['api', 'search-api'])
assert ctx.apps == ['api', 'search_api']
def test_add_variables(self):
ctx = Context()
ctx.add_variables({'a': 1})
ctx.add_variables({'b': 2})
ctx.add_variables({'a': 2})
assert ctx.variables == {'a': 2, 'b': 2}
def test_add_dotted_variable(self):
ctx = Context()
ctx.add_dotted_variable('a.b.c', 1)
assert ctx.variables == {'a': {'b': {'c': 1}}}
|
|
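The tests above pin down the Context API without showing it; the following is a minimal sketch that would satisfy them. It is an assumption for illustration, not the real dmaws.context.Context:

# Minimal Context sketch satisfying the tests in the record above;
# illustrative only, not the actual dmaws implementation.
class Context(object):
    def __init__(self):
        self.stage = None
        self.environment = None
        self.apps = []
        self.variables = {}

    def add_apps(self, apps):
        if isinstance(apps, str):
            apps = [apps]
        # dashes become underscores, as the tests require
        self.apps.extend(app.replace('-', '_') for app in apps)

    def add_variables(self, variables):
        self.variables.update(variables)   # later values win, as tested

    def add_dotted_variable(self, key, value):
        node = self.variables
        parts = key.split('.')
        for part in parts[:-1]:            # build nested dicts for 'a.b.c'
            node = node.setdefault(part, {})
        node[parts[-1]] = value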
4f66208343c29226bdb549c2b1d6d15cd2ab985e
|
tests/twisted/presence/initial-presence.py
|
tests/twisted/presence/initial-presence.py
|
"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186XmlStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
exec_test(test, protocol=ValidInvisibleListStream)
exec_test(test, protocol=Xep0186XmlStream)
|
"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186Stream, \
Xep0186AndValidInvisibleListStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
for protocol in [ValidInvisibleListStream, Xep0186Stream,
Xep0186AndValidInvisibleListStream]:
exec_test(test, protocol=protocol)
|
Add hybrid (XEP-0126 & XEP-0186) service to initial presence test.
|
Add hybrid (XEP-0126 & XEP-0186) service to initial presence test.
|
Python
|
lgpl-2.1
|
Ziemin/telepathy-gabble,jku/telepathy-gabble,mlundblad/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,Ziemin/telepathy-gabble,Ziemin/telepathy-gabble,jku/telepathy-gabble,mlundblad/telepathy-gabble,jku/telepathy-gabble
|
"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186XmlStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
exec_test(test, protocol=ValidInvisibleListStream)
exec_test(test, protocol=Xep0186XmlStream)
Add hybrid (XEP-0126 & XEP-0186) service to initial presence test.
|
"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186Stream, \
Xep0186AndValidInvisibleListStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
for protocol in [ValidInvisibleListStream, Xep0186Stream,
Xep0186AndValidInvisibleListStream]:
exec_test(test, protocol=protocol)
|
<commit_before>"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186XmlStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
exec_test(test, protocol=ValidInvisibleListStream)
exec_test(test, protocol=Xep0186XmlStream)
<commit_msg>Add hybrid (XEP-0126 & XEP-0186) service to initial presence test.<commit_after>
|
"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186Stream, \
Xep0186AndValidInvisibleListStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
for protocol in [ValidInvisibleListStream, Xep0186Stream,
Xep0186AndValidInvisibleListStream]:
exec_test(test, protocol=protocol)
|
"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186XmlStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
exec_test(test, protocol=ValidInvisibleListStream)
exec_test(test, protocol=Xep0186XmlStream)
Add hybrid (XEP-0126 & XEP-0186) service to initial presence test.
"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186Stream, \
Xep0186AndValidInvisibleListStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
for protocol in [ValidInvisibleListStream, Xep0186Stream,
Xep0186AndValidInvisibleListStream]:
exec_test(test, protocol=protocol)
|
<commit_before>"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186XmlStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
exec_test(test, protocol=ValidInvisibleListStream)
exec_test(test, protocol=Xep0186XmlStream)
<commit_msg>Add hybrid (XEP-0126 & XEP-0186) service to initial presence test.<commit_after>"""
Tests setting your own presence before calling Connect(), allowing the user to
sign in as Busy/Invisible/whatever rather than available.
"""
from twisted.words.xish import domish
from gabbletest import exec_test
from servicetest import EventPattern, assertEquals, assertNotEquals
import ns
import constants as cs
from invisible_helper import ValidInvisibleListStream, Xep0186Stream, \
Xep0186AndValidInvisibleListStream
def test(q, bus, conn, stream):
props = conn.Properties.GetAll(cs.CONN_IFACE_SIMPLE_PRESENCE)
assertNotEquals({}, props['Statuses'])
conn.SimplePresence.SetPresence("away", "watching bees")
conn.Connect()
_, presence = q.expect_many(
EventPattern('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTED, cs.CSR_REQUESTED]),
EventPattern('stream-presence'),
)
children = list(presence.stanza.elements())
assertEquals('show', children[0].name)
assertEquals('away', children[0].children[0])
assertEquals('status', children[1].name)
assertEquals('watching bees', children[1].children[0])
if __name__ == '__main__':
exec_test(test)
for protocol in [ValidInvisibleListStream, Xep0186Stream,
Xep0186AndValidInvisibleListStream]:
exec_test(test, protocol=protocol)
|
6b7b67f2233df86ac583952302baabf491ee0f8d
|
create-graph.py
|
create-graph.py
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
data = {'date': ['2014-05-01 18:47:05.069722', '2014-05-01 18:47:05.119994', '2014-05-02 18:47:05.178768', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.280592', '2014-05-03 18:47:05.332662', '2014-05-03 18:47:05.385109', '2014-05-04 18:47:05.436523', '2014-05-04 18:47:05.486877'],
'battle_deaths': [34, 25, 26, 15, 15, 14, 26, 25, 62, 41]}
df = pd.DataFrame(data, columns = ['date', 'battle_deaths'])
df['date'] = pd.to_datetime(df['date'])
df.index = df['date']
del df['date']
df.resample('D').sum().plot()
|
Add example of creating a pandas graph
|
Add example of creating a pandas graph
|
Python
|
mit
|
RobBollons/utilities
|
Add example of creating a pandas graph
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
data = {'date': ['2014-05-01 18:47:05.069722', '2014-05-01 18:47:05.119994', '2014-05-02 18:47:05.178768', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.280592', '2014-05-03 18:47:05.332662', '2014-05-03 18:47:05.385109', '2014-05-04 18:47:05.436523', '2014-05-04 18:47:05.486877'],
'battle_deaths': [34, 25, 26, 15, 15, 14, 26, 25, 62, 41]}
df = pd.DataFrame(data, columns = ['date', 'battle_deaths'])
df['date'] = pd.to_datetime(df['date'])
df.index = df['date']
del df['date']
df.resample('D').sum().plot()
|
<commit_before><commit_msg>Add example of creating a pandas graph<commit_after>
|
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
data = {'date': ['2014-05-01 18:47:05.069722', '2014-05-01 18:47:05.119994', '2014-05-02 18:47:05.178768', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.280592', '2014-05-03 18:47:05.332662', '2014-05-03 18:47:05.385109', '2014-05-04 18:47:05.436523', '2014-05-04 18:47:05.486877'],
'battle_deaths': [34, 25, 26, 15, 15, 14, 26, 25, 62, 41]}
df = pd.DataFrame(data, columns = ['date', 'battle_deaths'])
df['date'] = pd.to_datetime(df['date'])
df.index = df['date']
del df['date']
df.resample('D').sum().plot()
|
Add example of creating a pandas graph
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
data = {'date': ['2014-05-01 18:47:05.069722', '2014-05-01 18:47:05.119994', '2014-05-02 18:47:05.178768', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.280592', '2014-05-03 18:47:05.332662', '2014-05-03 18:47:05.385109', '2014-05-04 18:47:05.436523', '2014-05-04 18:47:05.486877'],
'battle_deaths': [34, 25, 26, 15, 15, 14, 26, 25, 62, 41]}
df = pd.DataFrame(data, columns = ['date', 'battle_deaths'])
df['date'] = pd.to_datetime(df['date'])
df.index = df['date']
del df['date']
df.resample('D').sum().plot()
|
<commit_before><commit_msg>Add example of creating a pandas graph<commit_after>import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
data = {'date': ['2014-05-01 18:47:05.069722', '2014-05-01 18:47:05.119994', '2014-05-02 18:47:05.178768', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.230071', '2014-05-02 18:47:05.280592', '2014-05-03 18:47:05.332662', '2014-05-03 18:47:05.385109', '2014-05-04 18:47:05.436523', '2014-05-04 18:47:05.486877'],
'battle_deaths': [34, 25, 26, 15, 15, 14, 26, 25, 62, 41]}
df = pd.DataFrame(data, columns = ['date', 'battle_deaths'])
df['date'] = pd.to_datetime(df['date'])
df.index = df['date']
del df['date']
df.resample('D').sum().plot()
|
|
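Since the record above ends at .plot(), it helps to see what the resample actually produces. Below is a hedged, self-contained sketch reusing the record's values; the simplified date-only timestamps, the headless Agg backend, and the output filename are assumptions:

# Verify the daily aggregation from the record above and save the plot.
import pandas as pd
import matplotlib
matplotlib.use('Agg')  # assumption: headless backend, no display needed
import matplotlib.pyplot as plt

# Same per-day values as the record, with timestamps reduced to dates.
dates = (['2014-05-01'] * 2 + ['2014-05-02'] * 4 +
         ['2014-05-03'] * 2 + ['2014-05-04'] * 2)
deaths = [34, 25, 26, 15, 15, 14, 26, 25, 62, 41]
df = pd.DataFrame({'battle_deaths': deaths}, index=pd.to_datetime(dates))

daily = df.resample('D').sum()
print(daily)  # expected sums: 59, 70, 51, 103 for May 1-4
daily.plot(title='battle_deaths per day')
plt.savefig('battle_deaths.png')  # arbitrary illustrative filename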
3bba7a50dbb42f7a89975396081291cf8bf2ae55
|
nipype/algorithms/tests/test_auto_Overlap.py
|
nipype/algorithms/tests/test_auto_Overlap.py
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.algorithms.misc import Overlap
def test_Overlap_inputs():
input_map = dict(bg_overlap=dict(mandatory=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
mask_volume=dict(),
out_file=dict(usedefault=True,
),
vol_units=dict(mandatory=True,
usedefault=True,
),
volume1=dict(mandatory=True,
),
volume2=dict(mandatory=True,
),
weighting=dict(usedefault=True,
),
)
inputs = Overlap.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Overlap_outputs():
output_map = dict(dice=dict(),
diff_file=dict(),
jaccard=dict(),
labels=dict(),
roi_di=dict(),
roi_ji=dict(),
roi_voldiff=dict(),
volume_difference=dict(),
)
outputs = Overlap.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
Test overlap not included before
|
Test overlap not included before
|
Python
|
bsd-3-clause
|
wanderine/nipype,mick-d/nipype,sgiavasis/nipype,gerddie/nipype,pearsonlab/nipype,iglpdc/nipype,carolFrohlich/nipype,dgellis90/nipype,mick-d/nipype,wanderine/nipype,wanderine/nipype,wanderine/nipype,Leoniela/nipype,grlee77/nipype,pearsonlab/nipype,pearsonlab/nipype,sgiavasis/nipype,carolFrohlich/nipype,dgellis90/nipype,JohnGriffiths/nipype,FCP-INDI/nipype,Leoniela/nipype,carolFrohlich/nipype,FCP-INDI/nipype,grlee77/nipype,glatard/nipype,iglpdc/nipype,glatard/nipype,gerddie/nipype,blakedewey/nipype,Leoniela/nipype,mick-d/nipype,FCP-INDI/nipype,gerddie/nipype,arokem/nipype,glatard/nipype,JohnGriffiths/nipype,JohnGriffiths/nipype,dgellis90/nipype,sgiavasis/nipype,gerddie/nipype,sgiavasis/nipype,blakedewey/nipype,dgellis90/nipype,FCP-INDI/nipype,grlee77/nipype,pearsonlab/nipype,mick-d/nipype,arokem/nipype,carolFrohlich/nipype,blakedewey/nipype,iglpdc/nipype,glatard/nipype,arokem/nipype,arokem/nipype,grlee77/nipype,JohnGriffiths/nipype,blakedewey/nipype,iglpdc/nipype
|
Test overlap not included before
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.algorithms.misc import Overlap
def test_Overlap_inputs():
input_map = dict(bg_overlap=dict(mandatory=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
mask_volume=dict(),
out_file=dict(usedefault=True,
),
vol_units=dict(mandatory=True,
usedefault=True,
),
volume1=dict(mandatory=True,
),
volume2=dict(mandatory=True,
),
weighting=dict(usedefault=True,
),
)
inputs = Overlap.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Overlap_outputs():
output_map = dict(dice=dict(),
diff_file=dict(),
jaccard=dict(),
labels=dict(),
roi_di=dict(),
roi_ji=dict(),
roi_voldiff=dict(),
volume_difference=dict(),
)
outputs = Overlap.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
<commit_before><commit_msg>Test overlap not included before<commit_after>
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.algorithms.misc import Overlap
def test_Overlap_inputs():
input_map = dict(bg_overlap=dict(mandatory=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
mask_volume=dict(),
out_file=dict(usedefault=True,
),
vol_units=dict(mandatory=True,
usedefault=True,
),
volume1=dict(mandatory=True,
),
volume2=dict(mandatory=True,
),
weighting=dict(usedefault=True,
),
)
inputs = Overlap.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Overlap_outputs():
output_map = dict(dice=dict(),
diff_file=dict(),
jaccard=dict(),
labels=dict(),
roi_di=dict(),
roi_ji=dict(),
roi_voldiff=dict(),
volume_difference=dict(),
)
outputs = Overlap.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
Test overlap not included before
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.algorithms.misc import Overlap
def test_Overlap_inputs():
input_map = dict(bg_overlap=dict(mandatory=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
mask_volume=dict(),
out_file=dict(usedefault=True,
),
vol_units=dict(mandatory=True,
usedefault=True,
),
volume1=dict(mandatory=True,
),
volume2=dict(mandatory=True,
),
weighting=dict(usedefault=True,
),
)
inputs = Overlap.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Overlap_outputs():
output_map = dict(dice=dict(),
diff_file=dict(),
jaccard=dict(),
labels=dict(),
roi_di=dict(),
roi_ji=dict(),
roi_voldiff=dict(),
volume_difference=dict(),
)
outputs = Overlap.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
<commit_before><commit_msg>Test overlap not included before<commit_after># AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.algorithms.misc import Overlap
def test_Overlap_inputs():
input_map = dict(bg_overlap=dict(mandatory=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
mask_volume=dict(),
out_file=dict(usedefault=True,
),
vol_units=dict(mandatory=True,
usedefault=True,
),
volume1=dict(mandatory=True,
),
volume2=dict(mandatory=True,
),
weighting=dict(usedefault=True,
),
)
inputs = Overlap.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_Overlap_outputs():
output_map = dict(dice=dict(),
diff_file=dict(),
jaccard=dict(),
labels=dict(),
roi_di=dict(),
roi_ji=dict(),
roi_voldiff=dict(),
volume_difference=dict(),
)
outputs = Overlap.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
|
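The yield-style assertions in the record above are the legacy nose idiom; modern pytest dropped support for yield tests, so a hedged sketch of the equivalent parametrized form follows (illustrative, not how tools/checkspecs.py actually emits tests; requires nipype and covers only a subset of the map):

# Illustrative pytest translation of the nose yield-test idiom above.
import pytest
from nipype.algorithms.misc import Overlap  # assumes nipype is installed

input_map = {
    'mask_volume': dict(),
    'out_file': dict(usedefault=True),
    'volume1': dict(mandatory=True),
}

@pytest.mark.parametrize('key, metakey, value', [
    (key, metakey, value)
    for key, metadata in input_map.items()
    for metakey, value in metadata.items()
])
def test_overlap_input_metadata(key, metakey, value):
    inputs = Overlap.input_spec()
    assert getattr(inputs.traits()[key], metakey) == value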
a9eecbd7f8ed29811cc4bb7021af49a75104db52
|
heat/engine/clients/microversion_mixin.py
|
heat/engine/clients/microversion_mixin.py
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from heat.common import exception
@six.add_metaclass(abc.ABCMeta)
class MicroversionMixin(object):
"""Mixin For microversion support."""
def client(self, version=None):
if version is None:
version = self.get_max_microversion()
elif not self.is_version_supported(version):
raise exception.InvalidServiceVersion(
version=version,
service=self._get_service_name())
if version in self._client_instances:
return self._client_instances[version]
self._client_instances[version] = self._create(version=version)
return self._client_instances[version]
@abc.abstractmethod
def get_max_microversion(self):
pass
@abc.abstractmethod
def is_version_supported(self, version):
pass
|
Add MicroversionMixin for microversion support
|
Add MicroversionMixin for microversion support
This generic mixin overloads plugin client() method to return
a client with max microversion unless a specific version is requested.
This provides two abstract methods
- get_max_microversion()
- is_version_supported()
for client plugins to override when supporting microversions.
Change-Id: I8873ab6d815671b6647b08578d1406dd874269f6
|
Python
|
apache-2.0
|
noironetworks/heat,openstack/heat,openstack/heat,noironetworks/heat
|
Add MicroversionMixin for microversion support
This generic mixin overloads plugin client() method to return
a client with max microversion unless a specific version is requested.
This provides two abstract methods
- get_max_microversion()
- is_version_supported()
for client plugins to override when supporting microversions.
Change-Id: I8873ab6d815671b6647b08578d1406dd874269f6
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from heat.common import exception
@six.add_metaclass(abc.ABCMeta)
class MicroversionMixin(object):
"""Mixin For microversion support."""
def client(self, version=None):
if version is None:
version = self.get_max_microversion()
elif not self.is_version_supported(version):
raise exception.InvalidServiceVersion(
version=version,
service=self._get_service_name())
if version in self._client_instances:
return self._client_instances[version]
self._client_instances[version] = self._create(version=version)
return self._client_instances[version]
@abc.abstractmethod
def get_max_microversion(self):
pass
@abc.abstractmethod
def is_version_supported(self, version):
pass
|
<commit_before><commit_msg>Add MicroversionMixin for microversion support
This generic mixin overloads plugin client() method to return
a client with max microversion unless a specific version is requested.
This provides two abstract methods
- get_max_microversion()
- is_version_supported()
for client plugins to override when supporting microversions.
Change-Id: I8873ab6d815671b6647b08578d1406dd874269f6<commit_after>
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from heat.common import exception
@six.add_metaclass(abc.ABCMeta)
class MicroversionMixin(object):
"""Mixin For microversion support."""
def client(self, version=None):
if version is None:
version = self.get_max_microversion()
elif not self.is_version_supported(version):
raise exception.InvalidServiceVersion(
version=version,
service=self._get_service_name())
if version in self._client_instances:
return self._client_instances[version]
self._client_instances[version] = self._create(version=version)
return self._client_instances[version]
@abc.abstractmethod
def get_max_microversion(self):
pass
@abc.abstractmethod
def is_version_supported(self, version):
pass
|
Add MicroversionMixin for microversion support
This generic mixin overloads plugin client() method to return
a client with max microversion unless a specific version is requested.
This provides two abstract methods
- get_max_microversion()
- is_version_supported()
for client plugins to override when supporting microversions.
Change-Id: I8873ab6d815671b6647b08578d1406dd874269f6
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from heat.common import exception
@six.add_metaclass(abc.ABCMeta)
class MicroversionMixin(object):
"""Mixin For microversion support."""
def client(self, version=None):
if version is None:
version = self.get_max_microversion()
elif not self.is_version_supported(version):
raise exception.InvalidServiceVersion(
version=version,
service=self._get_service_name())
if version in self._client_instances:
return self._client_instances[version]
self._client_instances[version] = self._create(version=version)
return self._client_instances[version]
@abc.abstractmethod
def get_max_microversion(self):
pass
@abc.abstractmethod
def is_version_supported(self, version):
pass
|
<commit_before><commit_msg>Add MicroversionMixin for microversion support
This generic mixin overloads plugin client() method to return
a client with max microversion unless a specific version is requested.
This provides two abstract methods
- get_max_microversion()
- is_version_supported()
for client plugins to override when supporting microversions.
Change-Id: I8873ab6d815671b6647b08578d1406dd874269f6<commit_after>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from heat.common import exception
@six.add_metaclass(abc.ABCMeta)
class MicroversionMixin(object):
"""Mixin For microversion support."""
def client(self, version=None):
if version is None:
version = self.get_max_microversion()
elif not self.is_version_supported(version):
raise exception.InvalidServiceVersion(
version=version,
service=self._get_service_name())
if version in self._client_instances:
return self._client_instances[version]
self._client_instances[version] = self._create(version=version)
return self._client_instances[version]
@abc.abstractmethod
def get_max_microversion(self):
pass
@abc.abstractmethod
def is_version_supported(self, version):
pass
|
|
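To make the mixin's contract concrete, here is a hedged sketch of a client plugin subclass. Everything below except the two abstract method names is hypothetical: the plugin class, the version constants, and the stubbed _create wiring are illustrative, not real heat code:

# Hypothetical plugin using MicroversionMixin; names are illustrative.
from heat.engine.clients.microversion_mixin import MicroversionMixin  # assumes heat is importable

MAX_MICROVERSION = '2.53'            # assumed values for the sketch
SUPPORTED = {'2.1', '2.26', '2.53'}

class ExampleClientPlugin(MicroversionMixin):
    def __init__(self):
        self._client_instances = {}  # the mixin caches one client per version

    def get_max_microversion(self):
        return MAX_MICROVERSION

    def is_version_supported(self, version):
        return version in SUPPORTED

    def _get_service_name(self):
        return 'example'

    def _create(self, version=None):
        # A real plugin builds an SDK client here; a stub keeps this runnable.
        return {'service': 'example', 'version': version}

plugin = ExampleClientPlugin()
assert plugin.client()['version'] == MAX_MICROVERSION   # defaults to max
assert plugin.client('2.1') is plugin.client('2.1')     # cached per version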
ec935144dd60e6201686720f3d658a1ba5f26d83
|
python/ct/client/tools/dump_sctlist.py
|
python/ct/client/tools/dump_sctlist.py
|
#!/usr/bin/env python
""" This is a util to dump the SCTs contained within a
SignedCertificateTimestampList structure.
This structure is used to represent a collection of SCTs being passed over
a TLS handshake. See RFC6962 section 3.3 for more details. """
import sys
from ct.proto import client_pb2
from ct.serialization import tls_message
def dump_sctlist(sct_list):
"""Prints the proto representation of the SCTs contained in sct_list.
Arguments:
sct_list the packed SignedCertificateTransparencyList structure.
"""
tr = tls_message.TLSReader(sct_list)
sctlist = client_pb2.SignedCertificateTimestampList()
tr.read(sctlist)
for s in sctlist.sct_list:
sct = client_pb2.SignedCertificateTimestamp()
tls_message.decode(s, sct)
print sct
if __name__ == "__main__":
if len(sys.argv) == 1:
print "Usage: dump_sctlist.py <file_containing_sct_list>"
sys.exit(1)
with open(sys.argv[1]) as f:
data = f.read()
dump_sctlist(data)
|
Add a tool to dump the contents of SignedCertificateTransparencyList structures.
|
Add a tool to dump the contents of SignedCertificateTransparencyList structures.
|
Python
|
apache-2.0
|
eranmes/certificate-transparency,eranmes/certificate-transparency,eranmes/certificate-transparency,AlCutter/certificate-transparency,AlCutter/certificate-transparency,eranmes/certificate-transparency,kyprizel/certificate-transparency,grandamp/certificate-transparency,RJPercival/certificate-transparency,benlaurie/certificate-transparency,lexibrent/certificate-transparency,katjoyce/certificate-transparency,taknira/certificate-transparency,kyprizel/certificate-transparency,grandamp/certificate-transparency,AlCutter/certificate-transparency,google/certificate-transparency,katjoyce/certificate-transparency,eranmes/certificate-transparency,taknira/certificate-transparency,benlaurie/certificate-transparency,lexibrent/certificate-transparency,google/certificate-transparency,taknira/certificate-transparency,benlaurie/certificate-transparency,phad/certificate-transparency,grandamp/certificate-transparency,RJPercival/certificate-transparency,katjoyce/certificate-transparency,taknira/certificate-transparency,phad/certificate-transparency,grandamp/certificate-transparency,AlCutter/certificate-transparency,pphaneuf/certificate-transparency,taknira/certificate-transparency,pphaneuf/certificate-transparency,RJPercival/certificate-transparency,google/certificate-transparency,eranmes/certificate-transparency,lexibrent/certificate-transparency,benlaurie/certificate-transparency,benlaurie/certificate-transparency,AlCutter/certificate-transparency,eranmes/certificate-transparency,katjoyce/certificate-transparency,katjoyce/certificate-transparency,kyprizel/certificate-transparency,kyprizel/certificate-transparency,katjoyce/certificate-transparency,benlaurie/certificate-transparency,pphaneuf/certificate-transparency,pphaneuf/certificate-transparency,taknira/certificate-transparency,kyprizel/certificate-transparency,lexibrent/certificate-transparency,katjoyce/certificate-transparency,taknira/certificate-transparency,kyprizel/certificate-transparency,RJPercival/certificate-transparency,benlaurie/certificate-transparency,AlCutter/certificate-transparency,phad/certificate-transparency,AlCutter/certificate-transparency,google/certificate-transparency,phad/certificate-transparency,kyprizel/certificate-transparency
|
Add a tool to dump the contents of SignedCertificateTransparencyList structures.
|
#!/usr/bin/env python
""" This is a util to dump the SCTs contained within a
SignedCertificateTimestampList structure.
This structure is used to represent a collection of SCTs being passed over
a TLS handshake. See RFC6962 section 3.3 for more details. """
import sys
from ct.proto import client_pb2
from ct.serialization import tls_message
def dump_sctlist(sct_list):
"""Prints the proto representation of the SCTs contained in sct_list.
Arguments:
sct_list the packed SignedCertificateTransparencyList structure.
"""
tr = tls_message.TLSReader(sct_list)
sctlist = client_pb2.SignedCertificateTimestampList()
tr.read(sctlist)
for s in sctlist.sct_list:
sct = client_pb2.SignedCertificateTimestamp()
tls_message.decode(s, sct)
print sct
if __name__ == "__main__":
if len(sys.argv) == 1:
print "Usage: dump_sctlist.py <file_containing_sct_list>"
sys.exit(1)
with open(sys.argv[1]) as f:
data = f.read()
dump_sctlist(data)
|
<commit_before><commit_msg>Add a tool to dump the contents of SignedCertificateTransparencyList structures.<commit_after>
|
#!/usr/bin/env python
""" This is a util to dump the SCTs contained within a
SignedCertificateTimestampList structure.
This structure is used to represent a collection of SCTs being passed over
a TLS handshake. See RFC6962 section 3.3 for more details. """
import sys
from ct.proto import client_pb2
from ct.serialization import tls_message
def dump_sctlist(sct_list):
"""Prints the proto representation of the SCTs contained in sct_list.
Arguments:
sct_list the packed SignedCertificateTransparencyList structure.
"""
tr = tls_message.TLSReader(sct_list)
sctlist = client_pb2.SignedCertificateTimestampList()
tr.read(sctlist)
for s in sctlist.sct_list:
sct = client_pb2.SignedCertificateTimestamp()
tls_message.decode(s, sct)
print sct
if __name__ == "__main__":
if len(sys.argv) == 1:
print "Usage: dump_sctlist.py <file_containing_sct_list>"
sys.exit(1)
with open(sys.argv[1]) as f:
data = f.read()
dump_sctlist(data)
|
Add a tool to dump the contents of SignedCertificateTransparencyList structures.
#!/usr/bin/env python
""" This is a util to dump the SCTs contained within a
SignedCertificateTimestampList structure.
This structure is used to represent a collection of SCTs being passed over
a TLS handshake. See RFC6962 section 3.3 for more details. """
import sys
from ct.proto import client_pb2
from ct.serialization import tls_message
def dump_sctlist(sct_list):
"""Prints the proto representation of the SCTs contained in sct_list.
Arguments:
sct_list the packed SignedCertificateTransparencyList structure.
"""
tr = tls_message.TLSReader(sct_list)
sctlist = client_pb2.SignedCertificateTimestampList()
tr.read(sctlist)
for s in sctlist.sct_list:
sct = client_pb2.SignedCertificateTimestamp()
tls_message.decode(s, sct)
print sct
if __name__ == "__main__":
if len(sys.argv) == 1:
print "Usage: dump_sctlist.py <file_containing_sct_list>"
sys.exit(1)
with open(sys.argv[1]) as f:
data = f.read()
dump_sctlist(data)
|
<commit_before><commit_msg>Add a tool to dump the contents of SignedCertificateTransparencyList structures.<commit_after>#!/usr/bin/env python
""" This is a util to dump the SCTs contained within a
SignedCertificateTimestampList structure.
This structure is used to represent a collection of SCTs being passed over
a TLS handshake. See RFC6962 section 3.3 for more details. """
import sys
from ct.proto import client_pb2
from ct.serialization import tls_message
def dump_sctlist(sct_list):
"""Prints the proto representation of the SCTs contained in sct_list.
Arguments:
sct_list the packed SignedCertificateTransparencyList structure.
"""
tr = tls_message.TLSReader(sct_list)
sctlist = client_pb2.SignedCertificateTimestampList()
tr.read(sctlist)
for s in sctlist.sct_list:
sct = client_pb2.SignedCertificateTimestamp()
tls_message.decode(s, sct)
print sct
if __name__ == "__main__":
if len(sys.argv) == 1:
print "Usage: dump_sctlist.py <file_containing_sct_list>"
sys.exit(1)
with open(sys.argv[1]) as f:
data = f.read()
dump_sctlist(data)
|
|
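The TLS structure decoded above is length-prefixed; as a companion, here is a hedged standalone sketch that splits a raw SignedCertificateTimestampList into individual SCT blobs without the ct libraries. It uses only the stdlib and follows the layout in RFC 6962 section 3.3 (2-byte list length, then 2-byte-length-prefixed SCTs); the fake payloads are made up for the round-trip check:

# Standalone sketch: split a serialized SignedCertificateTimestampList
# (RFC 6962 s3.3) into raw SCT byte strings using only the stdlib.
import struct

def split_sct_list(blob):
    (total_len,) = struct.unpack('!H', blob[:2])   # 2-byte list length
    body, pos, scts = blob[2:2 + total_len], 0, []
    while pos < len(body):
        (sct_len,) = struct.unpack('!H', body[pos:pos + 2])
        scts.append(body[pos + 2:pos + 2 + sct_len])
        pos += 2 + sct_len
    return scts

# Round-trip check with two fake SCT payloads (illustrative bytes only):
fake = b'\x00\x0a' + b'\x00\x03abc' + b'\x00\x03xyz'
assert split_sct_list(fake) == [b'abc', b'xyz']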
03b7c02cacce93dca6f95edd91a6b3f8307c34ec
|
neuroimaging/utils/tests/data/__init__.py
|
neuroimaging/utils/tests/data/__init__.py
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
Add data repository package. Link to externally installed nipy data.
|
Add data repository package. Link to externally installed nipy data.
|
Python
|
bsd-3-clause
|
nipy/nipy-labs,alexis-roche/niseg,arokem/nipy,alexis-roche/nipy,alexis-roche/register,nipy/nipy-labs,alexis-roche/nireg,alexis-roche/register,alexis-roche/nipy,alexis-roche/nipy,bthirion/nipy,alexis-roche/nireg,bthirion/nipy,alexis-roche/niseg,arokem/nipy,nipy/nireg,bthirion/nipy,bthirion/nipy,arokem/nipy,arokem/nipy,nipy/nireg,alexis-roche/register,alexis-roche/nipy
|
Add data repository package. Link to externally installed nipy data.
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
<commit_before><commit_msg>Add data repository package. Link to externally installed nipy data.<commit_after>
|
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
Add data repository package. Link to externally installed nipy data.
"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
<commit_before><commit_msg>Add data repository package. Link to externally installed nipy data.<commit_after>"""Information used for locating nipy test data.
Nipy uses a set of test data that is installed separately. The test
data should be located in the directory ``~/.nipy/tests/data``.
Install the data in your home directory from the data repository::
$ mkdir -p .nipy/tests/data
$ svn co http://neuroimaging.scipy.org/svn/ni/data/trunk/fmri .nipy/tests/data
"""
from os.path import expanduser, exists, join
from neuroimaging.data_io.datasource import Repository
# data directory should be: $HOME/.nipy/tests/data
datapath = expanduser(join('~', '.nipy', 'tests', 'data'))
if not exists(datapath):
raise IOError, 'Nipy data directory is not found!'
repository = Repository(datapath)
|
|
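Since the module above raises IOError at import time when the data is absent, test suites often prefer a soft skip instead; the sketch below shows that alternative under stated assumptions (Python 3, plain unittest, same directory convention; it is not the original nipy code):

# Alternative sketch: skip tests rather than failing at import time when
# the separately installed data directory is missing (Python 3, unittest).
import os
import unittest

DATAPATH = os.path.expanduser(os.path.join('~', '.nipy', 'tests', 'data'))

@unittest.skipUnless(os.path.exists(DATAPATH), 'nipy test data not installed')
class TestWithData(unittest.TestCase):
    def test_data_dir_present(self):
        self.assertTrue(os.path.isdir(DATAPATH))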
3e9156b98a1ab665fa02677dd247860d13a15443
|
tests/unit/test_dual.py
|
tests/unit/test_dual.py
|
# Import libnacl libs
import libnacl.public
import libnacl.dual
# Import python libs
import unittest
class TestPublic(unittest.TestCase):
'''
'''
def test_secretkey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
bob_box = libnacl.public.Box(bob.sk, alice.pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
alice_ctxt = alice_box.encrypt(msg)
self.assertNotEqual(msg, alice_ctxt)
aclear = alice_box.decrypt(alice_ctxt)
self.assertEqual(msg, aclear)
self.assertNotEqual(bob_ctxt, alice_ctxt)
def test_publickey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
alice_pk = libnacl.public.PublicKey(alice.pk)
bob_box = libnacl.public.Box(bob.sk, alice_pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
|
Add tests for dual secret keys
|
Add tests for dual secret keys
|
Python
|
apache-2.0
|
cachedout/libnacl,mindw/libnacl,johnttan/libnacl,RaetProtocol/libnacl,saltstack/libnacl,coinkite/libnacl
|
Add tests for dual secret keys
|
# Import libnacl libs
import libnacl.public
import libnacl.dual
# Import python libs
import unittest
class TestPublic(unittest.TestCase):
'''
'''
def test_secretkey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
bob_box = libnacl.public.Box(bob.sk, alice.pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
alice_ctxt = alice_box.encrypt(msg)
self.assertNotEqual(msg, alice_ctxt)
aclear = alice_box.decrypt(alice_ctxt)
self.assertEqual(msg, aclear)
self.assertNotEqual(bob_ctxt, alice_ctxt)
def test_publickey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
alice_pk = libnacl.public.PublicKey(alice.pk)
bob_box = libnacl.public.Box(bob.sk, alice_pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
|
<commit_before><commit_msg>Add tests for dual secret keys<commit_after>
|
# Import libnacl libs
import libnacl.public
import libnacl.dual
# Import python libs
import unittest
class TestPublic(unittest.TestCase):
'''
'''
def test_secretkey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
bob_box = libnacl.public.Box(bob.sk, alice.pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
alice_ctxt = alice_box.encrypt(msg)
self.assertNotEqual(msg, alice_ctxt)
aclear = alice_box.decrypt(alice_ctxt)
self.assertEqual(msg, aclear)
self.assertNotEqual(bob_ctxt, alice_ctxt)
def test_publickey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
alice_pk = libnacl.public.PublicKey(alice.pk)
bob_box = libnacl.public.Box(bob.sk, alice_pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
|
Add tests for dual secret keys
# Import libnacl libs
import libnacl.public
import libnacl.dual
# Import python libs
import unittest
class TestPublic(unittest.TestCase):
'''
'''
def test_secretkey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
bob_box = libnacl.public.Box(bob.sk, alice.pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
alice_ctxt = alice_box.encrypt(msg)
self.assertNotEqual(msg, alice_ctxt)
aclear = alice_box.decrypt(alice_ctxt)
self.assertEqual(msg, aclear)
self.assertNotEqual(bob_ctxt, alice_ctxt)
def test_publickey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
alice_pk = libnacl.public.PublicKey(alice.pk)
bob_box = libnacl.public.Box(bob.sk, alice_pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
|
<commit_before><commit_msg>Add tests for dual secret keys<commit_after># Import libnacl libs
import libnacl.public
import libnacl.dual
# Import python libs
import unittest
class TestPublic(unittest.TestCase):
'''
'''
def test_secretkey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
bob_box = libnacl.public.Box(bob.sk, alice.pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
alice_ctxt = alice_box.encrypt(msg)
self.assertNotEqual(msg, alice_ctxt)
aclear = alice_box.decrypt(alice_ctxt)
self.assertEqual(msg, aclear)
self.assertNotEqual(bob_ctxt, alice_ctxt)
def test_publickey(self):
'''
'''
msg = b'You\'ve got two empty halves of coconut and you\'re bangin\' \'em together.'
bob = libnacl.dual.DualSecret()
alice = libnacl.dual.DualSecret()
alice_pk = libnacl.public.PublicKey(alice.pk)
bob_box = libnacl.public.Box(bob.sk, alice_pk)
alice_box = libnacl.public.Box(alice.sk, bob.pk)
bob_ctxt = bob_box.encrypt(msg)
self.assertNotEqual(msg, bob_ctxt)
bclear = alice_box.decrypt(bob_ctxt)
self.assertEqual(msg, bclear)
|
|
872367f51c89f622e61cccfd051f041aa69002f6
|
l10n_ar_account/migrations/9.0.1.17.0/post-migration.py
|
l10n_ar_account/migrations/9.0.1.17.0/post-migration.py
|
# -*- coding: utf-8 -*-
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
# now that we have account_fix as depend we fix previous invoices
env['account.invoice'].search(
[('type', 'in', ['in_refund', 'out_refund'])]).compute_taxes()
|
ADD mig script to fix refunds taxes
|
ADD mig script to fix refunds taxes
|
Python
|
agpl-3.0
|
jobiols/odoo-argentina,jobiols/odoo-argentina,bmya/odoo-argentina,bmya/odoo-argentina,adhoc-dev/odoo-argentina,ingadhoc/odoo-argentina,adhoc-dev/odoo-argentina
|
ADD mig script to fix refunds taxes
|
# -*- coding: utf-8 -*-
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
# now that we have account_fix as depend we fix previous invoices
env['account.invoice'].search(
[('type', 'in', ['in_refund', 'out_refund'])]).compute_taxes()
|
<commit_before><commit_msg>ADD mig script to fix refunds taxes<commit_after>
|
# -*- coding: utf-8 -*-
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
# now that we have account_fix as depend we fix previous invoices
env['account.invoice'].search(
[('type', 'in', ['in_refund', 'out_refund'])]).compute_taxes()
|
ADD mig script to fix refunds taxes
# -*- coding: utf-8 -*-
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
# now that we have account_fix as depend we fix previous invoices
env['account.invoice'].search(
[('type', 'in', ['in_refund', 'out_refund'])]).compute_taxes()
|
<commit_before><commit_msg>ADD mig script to fix refunds taxes<commit_after># -*- coding: utf-8 -*-
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
# now that we have account_fix as depend we fix previous invoices
env['account.invoice'].search(
[('type', 'in', ['in_refund', 'out_refund'])]).compute_taxes()
|
|
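For readers unfamiliar with the decorator used above: @openupgrade.migrate(use_env=True) hands the migration step a ready environment. The skeleton below is a hypothetical sketch in the same style; the model name, domain, and written values are placeholders, not from the record:

# -*- coding: utf-8 -*-
# Hypothetical migration skeleton in the style of the record above;
# 'some.model' and the domain are illustrative placeholders.
from openupgradelib import openupgrade

@openupgrade.migrate(use_env=True)
def migrate(env, version):
    records = env['some.model'].search([('state', '=', 'draft')])
    records.write({'state': 'confirmed'})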
c55e31c040e4a1eae5ad6ec79d479eaab4d4a178
|
microcosm_flask/tests/conventions/test_logging_level.py
|
microcosm_flask/tests/conventions/test_logging_level.py
|
"""
Logging level test convention.
"""
from json import loads
from hamcrest import (
assert_that,
equal_to,
has_entries,
has_items,
is_,
)
from microcosm.api import create_object_graph
def test_logging_level_convention():
"""
The logging level endpoint returns the logger hierarchy.
"""
graph = create_object_graph(name="example", testing=True)
graph.use("logging_level_convention")
graph.lock()
client = graph.flask.test_client()
response = client.get("/api/logging_level")
assert_that(response.status_code, is_(equal_to(200)))
data = loads(response.get_data().decode("utf-8"))
assert_that(
data,
# this is a partial match
has_entries(
name="",
level="INFO",
children=has_items(
has_entries(
name="bravado",
level="INFO",
children=has_items(
has_entries(
name="bravado.requests_client",
level="ERROR",
),
),
),
has_entries(
name="requests",
level="WARNING",
),
),
),
)
|
Add unit test for logging level convention
|
Add unit test for logging level convention
|
Python
|
apache-2.0
|
globality-corp/microcosm-flask,globality-corp/microcosm-flask
|
Add unit test for logging level convention
|
"""
Logging level test convention.
"""
from json import loads
from hamcrest import (
assert_that,
equal_to,
has_entries,
has_items,
is_,
)
from microcosm.api import create_object_graph
def test_logging_level_convention():
"""
The logging level endpoint returns the logger hierarchy.
"""
graph = create_object_graph(name="example", testing=True)
graph.use("logging_level_convention")
graph.lock()
client = graph.flask.test_client()
response = client.get("/api/logging_level")
assert_that(response.status_code, is_(equal_to(200)))
data = loads(response.get_data().decode("utf-8"))
assert_that(
data,
# this is a partial match
has_entries(
name="",
level="INFO",
children=has_items(
has_entries(
name="bravado",
level="INFO",
children=has_items(
has_entries(
name="bravado.requests_client",
level="ERROR",
),
),
),
has_entries(
name="requests",
level="WARNING",
),
),
),
)
|
<commit_before><commit_msg>Add unit test for logging level convention<commit_after>
|
"""
Logging level test convention.
"""
from json import loads
from hamcrest import (
assert_that,
equal_to,
has_entries,
has_items,
is_,
)
from microcosm.api import create_object_graph
def test_logging_level_convention():
"""
The logging level endpoint returns the logger hierarchy.
"""
graph = create_object_graph(name="example", testing=True)
graph.use("logging_level_convention")
graph.lock()
client = graph.flask.test_client()
response = client.get("/api/logging_level")
assert_that(response.status_code, is_(equal_to(200)))
data = loads(response.get_data().decode("utf-8"))
assert_that(
data,
# this is a partial match
has_entries(
name="",
level="INFO",
children=has_items(
has_entries(
name="bravado",
level="INFO",
children=has_items(
has_entries(
name="bravado.requests_client",
level="ERROR",
),
),
),
has_entries(
name="requests",
level="WARNING",
),
),
),
)
|
Add unit test for logging level convention"""
Logging level test convention.
"""
from json import loads
from hamcrest import (
assert_that,
equal_to,
has_entries,
has_items,
is_,
)
from microcosm.api import create_object_graph
def test_logging_level_convention():
"""
    The logging level endpoint reports the logger hierarchy.
"""
graph = create_object_graph(name="example", testing=True)
graph.use("logging_level_convention")
graph.lock()
client = graph.flask.test_client()
response = client.get("/api/logging_level")
assert_that(response.status_code, is_(equal_to(200)))
data = loads(response.get_data().decode("utf-8"))
assert_that(
data,
# this is a partial match
has_entries(
name="",
level="INFO",
children=has_items(
has_entries(
name="bravado",
level="INFO",
children=has_items(
has_entries(
name="bravado.requests_client",
level="ERROR",
),
),
),
has_entries(
name="requests",
level="WARNING",
),
),
),
)
|
<commit_before><commit_msg>Add unit test for logging level convention<commit_after>"""
Logging level test convention.
"""
from json import loads
from hamcrest import (
assert_that,
equal_to,
has_entries,
has_items,
is_,
)
from microcosm.api import create_object_graph
def test_logging_level_convention():
"""
    The logging level endpoint reports the logger hierarchy.
"""
graph = create_object_graph(name="example", testing=True)
graph.use("logging_level_convention")
graph.lock()
client = graph.flask.test_client()
response = client.get("/api/logging_level")
assert_that(response.status_code, is_(equal_to(200)))
data = loads(response.get_data().decode("utf-8"))
assert_that(
data,
# this is a partial match
has_entries(
name="",
level="INFO",
children=has_items(
has_entries(
name="bravado",
level="INFO",
children=has_items(
has_entries(
name="bravado.requests_client",
level="ERROR",
),
),
),
has_entries(
name="requests",
level="WARNING",
),
),
),
)
|
|
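The convention's implementation is not part of this commit; a minimal sketch of how the nested name/level payload could be assembled with the standard logging module (the helper name is an assumption, not microcosm's actual code):

import logging


def logger_levels():
    # Hypothetical sketch: flat view of every registered logger and its
    # effective level; a view function could nest these by dotted name
    # into the parent/children JSON shape asserted above.
    manager = logging.Logger.manager
    return {
        name: logging.getLevelName(logger.getEffectiveLevel())
        for name, logger in sorted(manager.loggerDict.items())
        if isinstance(logger, logging.Logger)  # skip PlaceHolder nodes
    }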
7805ee9f5257ed0711bf2e18960030a8d66e7243
|
melon/melon_server.py
|
melon/melon_server.py
|
from flask import Flask
from flask import jsonify
import json
from bson.json_util import dumps as bson_dumps
# from flask_cors import CORS
import sacred_mongo
app = Flask(__name__)
# enable CORS since the back-end is in a different place than the front-end
# CORS(app)
app.debug = True
@app.route("/experiments", methods=['GET'])
def experiments():
return jsonify({'experiments': sacred_mongo.list_experiments()})
@app.route("/runs", methods=['GET'])
def list_runs():
return jsonify({'runs': [str(r['_id']) for r in sacred_mongo.list_runs()]})
@app.route("/runs/<run_id>", methods=['GET'])
def run_details(run_id):
return jsonify(json.loads(bson_dumps(sacred_mongo.get_run(run_id))))
if __name__ == "__main__":
# http://stackoverflow.com/questions/23639355/extremely-long-wait-time-when-loading-rest-resource-from-angularjs
# app.run(host="0.0.0.0", threaded=True)
app.run()
|
Create very rudimentary Flask JSON API to list the experiments.
|
Create very rudimentary Flask JSON API to list the experiments.
|
Python
|
mit
|
bzamecnik/sanctuary
|
Create very rudimentary Flask JSON API to list the experiments.
|
from flask import Flask
from flask import jsonify
import json
from bson.json_util import dumps as bson_dumps
# from flask_cors import CORS
import sacred_mongo
app = Flask(__name__)
# enable CORS since the back-end is in a different place than the front-end
# CORS(app)
app.debug = True
@app.route("/experiments", methods=['GET'])
def experiments():
return jsonify({'experiments': sacred_mongo.list_experiments()})
@app.route("/runs", methods=['GET'])
def list_runs():
return jsonify({'runs': [str(r['_id']) for r in sacred_mongo.list_runs()]})
@app.route("/runs/<run_id>", methods=['GET'])
def run_details(run_id):
return jsonify(json.loads(bson_dumps(sacred_mongo.get_run(run_id))))
if __name__ == "__main__":
# http://stackoverflow.com/questions/23639355/extremely-long-wait-time-when-loading-rest-resource-from-angularjs
# app.run(host="0.0.0.0", threaded=True)
app.run()
|
<commit_before><commit_msg>Create very rudimentary Flask JSON API to list the experiments.<commit_after>
|
from flask import Flask
from flask import jsonify
import json
from bson.json_util import dumps as bson_dumps
# from flask_cors import CORS
import sacred_mongo
app = Flask(__name__)
# enable CORS since the back-end is in a different place than the front-end
# CORS(app)
app.debug = True
@app.route("/experiments", methods=['GET'])
def experiments():
return jsonify({'experiments': sacred_mongo.list_experiments()})
@app.route("/runs", methods=['GET'])
def list_runs():
return jsonify({'runs': [str(r['_id']) for r in sacred_mongo.list_runs()]})
@app.route("/runs/<run_id>", methods=['GET'])
def run_details(run_id):
return jsonify(json.loads(bson_dumps(sacred_mongo.get_run(run_id))))
if __name__ == "__main__":
# http://stackoverflow.com/questions/23639355/extremely-long-wait-time-when-loading-rest-resource-from-angularjs
# app.run(host="0.0.0.0", threaded=True)
app.run()
|
Create very rudimentary Flask JSON API to list the experiments.from flask import Flask
from flask import jsonify
import json
from bson.json_util import dumps as bson_dumps
# from flask_cors import CORS
import sacred_mongo
app = Flask(__name__)
# enable CORS since the back-end is in a different place than the front-end
# CORS(app)
app.debug = True
@app.route("/experiments", methods=['GET'])
def experiments():
return jsonify({'experiments': sacred_mongo.list_experiments()})
@app.route("/runs", methods=['GET'])
def list_runs():
return jsonify({'runs': [str(r['_id']) for r in sacred_mongo.list_runs()]})
@app.route("/runs/<run_id>", methods=['GET'])
def run_details(run_id):
return jsonify(json.loads(bson_dumps(sacred_mongo.get_run(run_id))))
if __name__ == "__main__":
# http://stackoverflow.com/questions/23639355/extremely-long-wait-time-when-loading-rest-resource-from-angularjs
# app.run(host="0.0.0.0", threaded=True)
app.run()
|
<commit_before><commit_msg>Create very rudimentary Flask JSON API to list the experiments.<commit_after>from flask import Flask
from flask import jsonify
import json
from bson.json_util import dumps as bson_dumps
# from flask_cors import CORS
import sacred_mongo
app = Flask(__name__)
# enable CORS since the back-end is in a different place than the front-end
# CORS(app)
app.debug = True
@app.route("/experiments", methods=['GET'])
def experiments():
return jsonify({'experiments': sacred_mongo.list_experiments()})
@app.route("/runs", methods=['GET'])
def list_runs():
return jsonify({'runs': [str(r['_id']) for r in sacred_mongo.list_runs()]})
@app.route("/runs/<run_id>", methods=['GET'])
def run_details(run_id):
return jsonify(json.loads(bson_dumps(sacred_mongo.get_run(run_id))))
if __name__ == "__main__":
# http://stackoverflow.com/questions/23639355/extremely-long-wait-time-when-loading-rest-resource-from-angularjs
# app.run(host="0.0.0.0", threaded=True)
app.run()
|
|
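The sacred_mongo helper imported above is not part of this commit; a plausible pymongo-backed sketch, with the database and collection names ('sacred', 'runs') being guesses at Sacred's default MongoDB layout:

# Hypothetical sacred_mongo module; every name here is an assumption.
from bson.objectid import ObjectId
from pymongo import MongoClient

_runs = MongoClient('localhost', 27017)['sacred']['runs']


def list_experiments():
    return _runs.distinct('experiment.name')


def list_runs():
    return list(_runs.find({}, {'_id': 1}))


def get_run(run_id):
    return _runs.find_one({'_id': ObjectId(run_id)})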
2e72c7cb3e6dbd8797e537a58c6eef6350b2dd4d
|
plugins/logger/loggertype/test/test_logger_registrar.py
|
plugins/logger/loggertype/test/test_logger_registrar.py
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
import functools #To test filling in partials via metadata.
import loggertype.loggerregistrar #The module we're testing.
import luna.test_case #For parametrised tests.
def _arbitrary_function(x, y):
"""
A function to provide in test metadata.
:param x: The first argument.
:param y: The second argument.
    :return: The product of the two arguments.
"""
return x * y
class _CallableObject:
"""
An object to provide in test metadata which has a __call__ function.
"""
def __call__(self, *args, **kwargs):
"""
Calls the callable object, which does nothing.
:param args: Arguments to call the object with.
:param kwargs: Key-word arguments to call the object with.
"""
pass
class TestLoggerRegistrar(luna.test_case.TestCase):
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
def _arbitrary_method(self):
"""
A method to provide in test metadata.
"""
pass
@luna.test_case.parametrise({
"functions": {
"metadata": {
"logger": {
"critical": _arbitrary_function,
"debug": _arbitrary_function,
"error": _arbitrary_function,
"info": _arbitrary_function,
"warning": _arbitrary_function
}
}
},
"various_callables": {
"metadata": {
"logger": {
"critical": print, #A built-in.
"debug": _arbitrary_method, #A normal function.
"error": _CallableObject, #A callable object.
"info": lambda x: x, #A lambda function.
"warning": functools.partial(_arbitrary_function, 3) #A partial function.
}
}
}
})
def test_validate_metadata_correct(self, metadata):
"""
Tests the validate_metadata function against metadata that is correct.
The function is tested with various instances of metadata, all of which
are correct. It is tested if the validation deems the metadata correct
also.
:param metadata: Correct metadata.
"""
loggertype.loggerregistrar.validate_metadata(metadata) #Should not give an exception.
|
Add test for happy path of validate_metadata
|
Add test for happy path of validate_metadata
A good test for our parametrised decorator, too.
|
Python
|
cc0-1.0
|
Ghostkeeper/Luna
|
Add test for happy path of validate_metadata
A good test for our parametrised decorator, too.
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
import functools #To test filling in partials via metadata.
import loggertype.loggerregistrar #The module we're testing.
import luna.test_case #For parametrised tests.
def _arbitrary_function(x, y):
"""
A function to provide in test metadata.
:param x: The first argument.
:param y: The second argument.
    :return: The product of the two arguments.
"""
return x * y
class _CallableObject:
"""
An object to provide in test metadata which has a __call__ function.
"""
def __call__(self, *args, **kwargs):
"""
Calls the callable object, which does nothing.
:param args: Arguments to call the object with.
:param kwargs: Key-word arguments to call the object with.
"""
pass
class TestLoggerRegistrar(luna.test_case.TestCase):
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
def _arbitrary_method(self):
"""
A method to provide in test metadata.
"""
pass
@luna.test_case.parametrise({
"functions": {
"metadata": {
"logger": {
"critical": _arbitrary_function,
"debug": _arbitrary_function,
"error": _arbitrary_function,
"info": _arbitrary_function,
"warning": _arbitrary_function
}
}
},
"various_callables": {
"metadata": {
"logger": {
"critical": print, #A built-in.
"debug": _arbitrary_method, #A normal function.
"error": _CallableObject, #A callable object.
"info": lambda x: x, #A lambda function.
"warning": functools.partial(_arbitrary_function, 3) #A partial function.
}
}
}
})
def test_validate_metadata_correct(self, metadata):
"""
Tests the validate_metadata function against metadata that is correct.
The function is tested with various instances of metadata, all of which
are correct. It is tested if the validation deems the metadata correct
also.
:param metadata: Correct metadata.
"""
loggertype.loggerregistrar.validate_metadata(metadata) #Should not give an exception.
|
<commit_before><commit_msg>Add test for happy path of validate_metadata
A good test for our parametrised decorator, too.<commit_after>
|
#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
import functools #To test filling in partials via metadata.
import loggertype.loggerregistrar #The module we're testing.
import luna.test_case #For parametrised tests.
def _arbitrary_function(x, y):
"""
A function to provide in test metadata.
:param x: The first argument.
:param y: The second argument.
    :return: The product of the two arguments.
"""
return x * y
class _CallableObject:
"""
An object to provide in test metadata which has a __call__ function.
"""
def __call__(self, *args, **kwargs):
"""
Calls the callable object, which does nothing.
:param args: Arguments to call the object with.
:param kwargs: Key-word arguments to call the object with.
"""
pass
class TestLoggerRegistrar(luna.test_case.TestCase):
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
def _arbitrary_method(self):
"""
A method to provide in test metadata.
"""
pass
@luna.test_case.parametrise({
"functions": {
"metadata": {
"logger": {
"critical": _arbitrary_function,
"debug": _arbitrary_function,
"error": _arbitrary_function,
"info": _arbitrary_function,
"warning": _arbitrary_function
}
}
},
"various_callables": {
"metadata": {
"logger": {
"critical": print, #A built-in.
"debug": _arbitrary_method, #A normal function.
"error": _CallableObject, #A callable object.
"info": lambda x: x, #A lambda function.
"warning": functools.partial(_arbitrary_function, 3) #A partial function.
}
}
}
})
def test_validate_metadata_correct(self, metadata):
"""
Tests the validate_metadata function against metadata that is correct.
The function is tested with various instances of metadata, all of which
are correct. It is tested if the validation deems the metadata correct
also.
:param metadata: Correct metadata.
"""
loggertype.loggerregistrar.validate_metadata(metadata) #Should not give an exception.
|
Add test for happy path of validate_metadata
A good test for our parametrised decorator, too.#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
import functools #To test filling in partials via metadata.
import loggertype.loggerregistrar #The module we're testing.
import luna.test_case #For parametrised tests.
def _arbitrary_function(x, y):
"""
A function to provide in test metadata.
:param x: The first argument.
:param y: The second argument.
    :return: The product of the two arguments.
"""
return x * y
class _CallableObject:
"""
An object to provide in test metadata which has a __call__ function.
"""
def __call__(self, *args, **kwargs):
"""
Calls the callable object, which does nothing.
:param args: Arguments to call the object with.
:param kwargs: Key-word arguments to call the object with.
"""
pass
class TestLoggerRegistrar(luna.test_case.TestCase):
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
def _arbitrary_method(self):
"""
A method to provide in test metadata.
"""
pass
@luna.test_case.parametrise({
"functions": {
"metadata": {
"logger": {
"critical": _arbitrary_function,
"debug": _arbitrary_function,
"error": _arbitrary_function,
"info": _arbitrary_function,
"warning": _arbitrary_function
}
}
},
"various_callables": {
"metadata": {
"logger": {
"critical": print, #A built-in.
"debug": _arbitrary_method, #A normal function.
"error": _CallableObject, #A callable object.
"info": lambda x: x, #A lambda function.
"warning": functools.partial(_arbitrary_function, 3) #A partial function.
}
}
}
})
def test_validate_metadata_correct(self, metadata):
"""
Tests the validate_metadata function against metadata that is correct.
The function is tested with various instances of metadata, all of which
are correct. It is tested if the validation deems the metadata correct
also.
:param metadata: Correct metadata.
"""
loggertype.loggerregistrar.validate_metadata(metadata) #Should not give an exception.
|
<commit_before><commit_msg>Add test for happy path of validate_metadata
A good test for our parametrised decorator, too.<commit_after>#!/usr/bin/env python
#-*- coding: utf-8 -*-
#This software is distributed under the Creative Commons license (CC0) version 1.0. A copy of this license should have been distributed with this software.
#The license can also be read online: <https://creativecommons.org/publicdomain/zero/1.0/>. If this online license differs from the license provided with this software, the license provided with this software should be applied.
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
import functools #To test filling in partials via metadata.
import loggertype.loggerregistrar #The module we're testing.
import luna.test_case #For parametrised tests.
def _arbitrary_function(x, y):
"""
A function to provide in test metadata.
:param x: The first argument.
:param y: The second argument.
    :return: The product of the two arguments.
"""
return x * y
class _CallableObject:
"""
An object to provide in test metadata which has a __call__ function.
"""
def __call__(self, *args, **kwargs):
"""
Calls the callable object, which does nothing.
:param args: Arguments to call the object with.
:param kwargs: Key-word arguments to call the object with.
"""
pass
class TestLoggerRegistrar(luna.test_case.TestCase):
"""
Tests the behaviour of the registrar that registers logger plug-ins.
"""
def _arbitrary_method(self):
"""
A method to provide in test metadata.
"""
pass
@luna.test_case.parametrise({
"functions": {
"metadata": {
"logger": {
"critical": _arbitrary_function,
"debug": _arbitrary_function,
"error": _arbitrary_function,
"info": _arbitrary_function,
"warning": _arbitrary_function
}
}
},
"various_callables": {
"metadata": {
"logger": {
"critical": print, #A built-in.
"debug": _arbitrary_method, #A normal function.
"error": _CallableObject, #A callable object.
"info": lambda x: x, #A lambda function.
"warning": functools.partial(_arbitrary_function, 3) #A partial function.
}
}
}
})
def test_validate_metadata_correct(self, metadata):
"""
Tests the validate_metadata function against metadata that is correct.
The function is tested with various instances of metadata, all of which
are correct. It is tested if the validation deems the metadata correct
also.
:param metadata: Correct metadata.
"""
loggertype.loggerregistrar.validate_metadata(metadata) #Should not give an exception.
|
|
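The registrar under test is not included in the commit; a minimal validate_metadata sketch that the happy-path cases above would satisfy (illustrative only, not Luna's implementation):

def validate_metadata(metadata):
    # Illustrative sketch: require a "logger" section that maps each
    # severity the tests exercise to something callable.
    severities = ("critical", "debug", "error", "info", "warning")
    logger_section = metadata.get("logger")
    if not isinstance(logger_section, dict):
        raise ValueError("Metadata must contain a 'logger' dictionary.")
    for severity in severities:
        if not callable(logger_section.get(severity)):
            raise ValueError("'{}' must map to a callable.".format(severity))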
30d70f30b24454affaf56299a014e577089dc885
|
tools/telemetry/catapult_base/__init__.py
|
tools/telemetry/catapult_base/__init__.py
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# All files in this directory should be moved to catapult/base/ after moving
# to the new repo.
|
Add catapult_base folder to tools/telemetry to make the refactor easier.
|
Add catapult_base folder to tools/telemetry to make the refactor easier.
This will make some of the refactoring more obvious and easy to review, as
well as making the needed refactoring after moving to the catapult repo easier.
BUG=473414
Review URL: https://codereview.chromium.org/1168263002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#333399}
|
Python
|
bsd-3-clause
|
axinging/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,hgl888/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,Just-D/chromium-1,Chilledheart/chromium,Just-D/chromium-1,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk
|
Add catapult_base folder to tools/telemetry to make the refactor easier.
This will make some of the refactoring more obvious and easy to review, as
well as making the needed refactoring after moving to the catapult repo easier.
BUG=473414
Review URL: https://codereview.chromium.org/1168263002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#333399}
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# All files in this directory should be moved to catapult/base/ after moving
# to the new repo.
|
<commit_before><commit_msg>Add catapult_base folder to tools/telemetry to make the refactor easier.
This will make some of the refactoring more obvious and easy to review, as
well as making the needed refactoring after moving to the catapult repo easier.
BUG=473414
Review URL: https://codereview.chromium.org/1168263002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#333399}<commit_after>
|
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# All files in this directory should be moved to catapult/base/ after moving
# to the new repo.
|
Add catapult_base folder to tools/telemetry to make the refactor easier.
This will make some of the refactoring more obvious and easy to review, as
well as making the needed refactoring after moving to the catapult repo easier.
BUG=473414
Review URL: https://codereview.chromium.org/1168263002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#333399}# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# All files in this directory should be moved to catapult/base/ after moving
# to the new repo.
|
<commit_before><commit_msg>Add catapult_base folder to tools/telemetry to make the refactor easier.
This will make some of the refactoring more obvious and easy to review, as
well as making the needed refactoring after moving to the catapult repo easier.
BUG=473414
Review URL: https://codereview.chromium.org/1168263002
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#333399}<commit_after># Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# All files in this directory should be moved to catapult/base/ after moving
# to the new repo.
|
|
67cab1af0fcca0634d52cfd369564b3a896cc2e5
|
CheckIO/First_Island/5_Unlucky_days.py
|
CheckIO/First_Island/5_Unlucky_days.py
|
#!/usr/bin/env python
from datetime import date
def checkio(year):
day_count = 0
for month in range(1, 13):
day = date(year, month, 13)
# Day = ISO8601 1..7 for Mon -> Sun
if day.isoweekday() == 5:
day_count += 1
return day_count
def test_function():
    # These "asserts" are used only for self-checking and are not necessary for auto-testing
assert checkio(2015) == 3, "First - 2015"
assert checkio(1986) == 1, "Second - 1986"
assert checkio(2689) == 2, "2689"
if __name__ == "__main__":
test_function()
|
Add 1 check, unlucky days
|
Add 1 check, unlucky days
|
Python
|
mit
|
marshallhumble/Coding_Challenges,marshallhumble/Coding_Challenges,marshallhumble/Euler_Groovy
|
Add 1 check, unlucky days
|
#!/usr/bin/env python
from datetime import date
def checkio(year):
day_count = 0
for month in range(1, 13):
day = date(year, month, 13)
# Day = ISO8601 1..7 for Mon -> Sun
if day.isoweekday() == 5:
day_count += 1
return day_count
def test_function():
    # These "asserts" are used only for self-checking and are not necessary for auto-testing
assert checkio(2015) == 3, "First - 2015"
assert checkio(1986) == 1, "Second - 1986"
assert checkio(2689) == 2, "2689"
if __name__ == "__main__":
test_function()
|
<commit_before><commit_msg>Add 1 check, unlucky days<commit_after>
|
#!/usr/bin/env python
from datetime import date
def checkio(year):
day_count = 0
for month in range(1, 13):
day = date(year, month, 13)
# Day = ISO8601 1..7 for Mon -> Sun
if day.isoweekday() == 5:
day_count += 1
return day_count
def test_function():
    # These "asserts" are used only for self-checking and are not necessary for auto-testing
assert checkio(2015) == 3, "First - 2015"
assert checkio(1986) == 1, "Second - 1986"
assert checkio(2689) == 2, "2689"
if __name__ == "__main__":
test_function()
|
Add 1 check, unlucky days#!/usr/bin/env python
from datetime import date
def checkio(year):
day_count = 0
for month in range(1, 13):
day = date(year, month, 13)
# Day = ISO8601 1..7 for Mon -> Sun
if day.isoweekday() == 5:
day_count += 1
return day_count
def test_function():
    # These "asserts" are used only for self-checking and are not necessary for auto-testing
assert checkio(2015) == 3, "First - 2015"
assert checkio(1986) == 1, "Second - 1986"
assert checkio(2689) == 2, "2689"
if __name__ == "__main__":
test_function()
|
<commit_before><commit_msg>Add 1 check, unlucky days<commit_after>#!/usr/bin/env python
from datetime import date
def checkio(year):
day_count = 0
for month in range(1, 13):
day = date(year, month, 13)
# Day = ISO8601 1..7 for Mon -> Sun
if day.isoweekday() == 5:
day_count += 1
return day_count
def test_function():
    # These "asserts" are used only for self-checking and are not necessary for auto-testing
assert checkio(2015) == 3, "First - 2015"
assert checkio(1986) == 1, "Second - 1986"
assert checkio(2689) == 2, "2689"
if __name__ == "__main__":
test_function()
|
|
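The month loop reduces to a single generator expression; an equivalent compact form that passes the same asserts:

from datetime import date


def checkio_compact(year):
    # Count the months whose 13th falls on a Friday (ISO weekday 5).
    return sum(date(year, month, 13).isoweekday() == 5
               for month in range(1, 13))


assert checkio_compact(2015) == 3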
0c46531fd8eb4b492f4f1645a444eb3914dec3af
|
recurring_contract/migrations/9.0.1.0.0/pre-migration.py
|
recurring_contract/migrations/9.0.1.0.0/pre-migration.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
cr = env.cr
# Install dependency
openupgrade.logged_query(cr, """
UPDATE ir_module_module
SET state='to install'
WHERE name = 'account_payment_mode' AND state='uninstalled';
""")
|
Add migration script for recurring_contract
|
Add migration script for recurring_contract
|
Python
|
agpl-3.0
|
ecino/compassion-accounting,ecino/compassion-accounting,CompassionCH/compassion-accounting,CompassionCH/compassion-accounting
|
Add migration script for recurring_contract
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
cr = env.cr
# Install dependency
openupgrade.logged_query(cr, """
UPDATE ir_module_module
SET state='to install'
WHERE name = 'account_payment_mode' AND state='uninstalled';
""")
|
<commit_before><commit_msg>Add migration script for recurring_contract<commit_after>
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
cr = env.cr
# Install dependency
openupgrade.logged_query(cr, """
UPDATE ir_module_module
SET state='to install'
WHERE name = 'account_payment_mode' AND state='uninstalled';
""")
|
Add migration script for recurring_contract# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
cr = env.cr
# Install dependency
openupgrade.logged_query(cr, """
UPDATE ir_module_module
SET state='to install'
WHERE name = 'account_payment_mode' AND state='uninstalled';
""")
|
<commit_before><commit_msg>Add migration script for recurring_contract<commit_after># -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2017 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <ecino@compassion.ch>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openupgradelib import openupgrade
@openupgrade.migrate(use_env=True)
def migrate(env, version):
if not version:
return
cr = env.cr
# Install dependency
openupgrade.logged_query(cr, """
UPDATE ir_module_module
SET state='to install'
WHERE name = 'account_payment_mode' AND state='uninstalled';
""")
|
|
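For comparison, the same state change expressed through the ORM (a hedged sketch; pre-migration scripts usually prefer raw SQL because the registry may not be fully loadable at that point, which is likely why the committed version does):

# Hedged ORM equivalent of the logged_query above, assuming the
# ir.module.module model is usable this early in the upgrade.
env['ir.module.module'].search(
    [('name', '=', 'account_payment_mode'),
     ('state', '=', 'uninstalled')]).write({'state': 'to install'})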
2d244668103e0a732e618ba6de60f9d1b75dd329
|
salt_observer/management/commands/checkdomainvalidity.py
|
salt_observer/management/commands/checkdomainvalidity.py
|
from django.core.management.base import BaseCommand
from salt_observer.models import Domain
class Command(BaseCommand):
    help = 'Check whether all domains are valid'
def handle(self, *args, **kwargs):
''' Check every domain '''
for domain in Domain.objects.all():
domain.check_if_valid()
|
Add new command to check if domains are valid
|
Add new command to check if domains are valid
|
Python
|
mit
|
hs-hannover/salt-observer,hs-hannover/salt-observer,hs-hannover/salt-observer
|
Add new command to check if domains are valid
|
from django.core.management.base import BaseCommand
from salt_observer.models import Domain
class Command(BaseCommand):
    help = 'Check whether all domains are valid'
def handle(self, *args, **kwargs):
''' Check every domain '''
for domain in Domain.objects.all():
domain.check_if_valid()
|
<commit_before><commit_msg>Add new command to check if domains are valid<commit_after>
|
from django.core.management.base import BaseCommand
from salt_observer.models import Domain
class Command(BaseCommand):
    help = 'Check whether all domains are valid'
def handle(self, *args, **kwargs):
''' Check every domain '''
for domain in Domain.objects.all():
domain.check_if_valid()
|
Add new command to check if domains are validfrom django.core.management.base import BaseCommand
from salt_observer.models import Domain
class Command(BaseCommand):
    help = 'Check whether all domains are valid'
def handle(self, *args, **kwargs):
''' Check every domain '''
for domain in Domain.objects.all():
domain.check_if_valid()
|
<commit_before><commit_msg>Add new command to check if domains are valid<commit_after>from django.core.management.base import BaseCommand
from salt_observer.models import Domain
class Command(BaseCommand):
    help = 'Check whether all domains are valid'
def handle(self, *args, **kwargs):
''' Check every domain '''
for domain in Domain.objects.all():
domain.check_if_valid()
|
|
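The command runs as python manage.py checkdomainvalidity; a slightly extended sketch that also reports how many domains were checked (hypothetical, built only on the check_if_valid method already used above):

from django.core.management.base import BaseCommand

from salt_observer.models import Domain


class Command(BaseCommand):
    help = 'Check whether all domains are valid'

    def handle(self, *args, **kwargs):
        # Same loop as the committed command, plus a summary line.
        domains = Domain.objects.all()
        for domain in domains:
            domain.check_if_valid()
        self.stdout.write('Checked {} domains.'.format(domains.count()))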
da01f6f1a230cb2889099357828ab750c8ee9eac
|
kboard/board/tests/test_templatetags.py
|
kboard/board/tests/test_templatetags.py
|
from django.core.urlresolvers import reverse
from .base import BoardAppTest
from board.templatetags.url_parameter import url_parameter
class UrlParameterTest(BoardAppTest):
def test_contains_correct_string(self):
parameter = {
'a': 13,
'query': 'hello',
'b': 'This is a test'
}
url_string = url_parameter(**parameter)
self.assertIn('a=13', url_string)
self.assertIn('query=hello', url_string)
self.assertIn('b=This is a test', url_string)
def test_returns_empty_string_when_there_is_no_data(self):
parameter = {}
url_string = url_parameter(**parameter)
self.assertEqual('', url_string)
|
Add unittest of 'url_parameter' template tag
|
Add unittest of 'url_parameter' template tag
|
Python
|
mit
|
guswnsxodlf/k-board,guswnsxodlf/k-board,hyesun03/k-board,cjh5414/kboard,darjeeling/k-board,hyesun03/k-board,kboard/kboard,cjh5414/kboard,guswnsxodlf/k-board,kboard/kboard,kboard/kboard,cjh5414/kboard,hyesun03/k-board
|
Add unittest of 'url_parameter' template tag
|
from django.core.urlresolvers import reverse
from .base import BoardAppTest
from board.templatetags.url_parameter import url_parameter
class UrlParameterTest(BoardAppTest):
def test_contains_correct_string(self):
parameter = {
'a': 13,
'query': 'hello',
'b': 'This is a test'
}
url_string = url_parameter(**parameter)
self.assertIn('a=13', url_string)
self.assertIn('query=hello', url_string)
self.assertIn('b=This is a test', url_string)
def test_returns_empty_string_when_there_is_no_data(self):
parameter = {}
url_string = url_parameter(**parameter)
self.assertEqual('', url_string)
|
<commit_before><commit_msg>Add unittest of 'url_parameter' template tag<commit_after>
|
from django.core.urlresolvers import reverse
from .base import BoardAppTest
from board.templatetags.url_parameter import url_parameter
class UrlParameterTest(BoardAppTest):
def test_contains_correct_string(self):
parameter = {
'a': 13,
'query': 'hello',
'b': 'This is a test'
}
url_string = url_parameter(**parameter)
self.assertIn('a=13', url_string)
self.assertIn('query=hello', url_string)
self.assertIn('b=This is a test', url_string)
def test_returns_empty_string_when_there_is_no_data(self):
parameter = {}
url_string = url_parameter(**parameter)
self.assertEqual('', url_string)
|
Add unittest of 'url_parameter' template tagfrom django.core.urlresolvers import reverse
from .base import BoardAppTest
from board.templatetags.url_parameter import url_parameter
class UrlParameterTest(BoardAppTest):
def test_contains_correct_string(self):
parameter = {
'a': 13,
'query': 'hello',
'b': 'This is a test'
}
url_string = url_parameter(**parameter)
self.assertIn('a=13', url_string)
self.assertIn('query=hello', url_string)
self.assertIn('b=This is a test', url_string)
def test_returns_empty_string_when_there_is_no_data(self):
parameter = {}
url_string = url_parameter(**parameter)
self.assertEqual('', url_string)
|
<commit_before><commit_msg>Add unittest of 'url_parameter' template tag<commit_after>from django.core.urlresolvers import reverse
from .base import BoardAppTest
from board.templatetags.url_parameter import url_parameter
class UrlParameterTest(BoardAppTest):
def test_contains_correct_string(self):
parameter = {
'a': 13,
'query': 'hello',
'b': 'This is a test'
}
url_string = url_parameter(**parameter)
self.assertIn('a=13', url_string)
self.assertIn('query=hello', url_string)
self.assertIn('b=This is a test', url_string)
def test_returns_empty_string_when_there_is_no_data(self):
parameter = {}
url_string = url_parameter(**parameter)
self.assertEqual('', url_string)
|
|
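The tag module itself is not shown in the commit; a plausible board/templatetags/url_parameter.py consistent with both assertions (hypothetical — note the 'b=This is a test' case implies values are joined without URL-encoding):

from django import template

register = template.Library()


@register.simple_tag
def url_parameter(**kwargs):
    # Build a query string from keyword arguments; empty input yields ''.
    if not kwargs:
        return ''
    return '?' + '&'.join('{}={}'.format(k, v) for k, v in kwargs.items())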
b1ca267f6aa27e151a2b7f75f948794291dd7d44
|
kpi/management/commands/copy_kc_profile.py
|
kpi/management/commands/copy_kc_profile.py
|
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from hub.models import ExtraUserDetail
from kpi.deployment_backends.kc_reader.utils import get_kc_profile_data
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--all-users',
action='store_true',
dest='all_users',
default=False,
help="Copy all users' profiles"),
make_option('--username',
action='store',
dest='username',
default=False,
help="Copy only a specific user's profiles"),
make_option('--again',
action='store_true',
dest='again',
default=False,
help='Usually, a KC profile is copied only once per user, '
'making it possible for the user to blank out a '
'field without having the old value from KC '
'reappear. To copy previously copied profiles again, '
'use this option'
),
)
def handle(self, *args, **options):
if options.get('all_users'):
users = User.objects.all()
elif options.get('username'):
users = User.objects.filter(username=options.get('username'))
else:
raise CommandError('No users selected!')
initial_count = users.count()
if not options.get('again'):
# Poor man's query within JSONField that saves a little time. We'll
# check `copied_kc_profile` again after parsing the JSON
users = users.exclude(
extra_details__data__contains='"copied_kc_profile":true')
copied_count = 0
for user in users:
extra_details, created = ExtraUserDetail.objects.get_or_create(
user=user)
if not extra_details.data.get('copied_kc_profile', False) or \
options.get('again'):
kc_detail = get_kc_profile_data(user.pk)
for k, v in kc_detail.iteritems():
if extra_details.data.get(k, None) is None:
extra_details.data[k] = v
extra_details.data['copied_kc_profile'] = True
copied_count += 1
extra_details.save()
self.stdout.write('Copied {} profile{}.'.format(
copied_count, '' if copied_count == 1 else 's'))
skipped_count = initial_count - copied_count
if skipped_count:
self.stdout.write('Skipped {} profile{}.'.format(
skipped_count, '' if skipped_count == 1 else 's'))
|
Include management command missing from a080693
|
Include management command missing from a080693
|
Python
|
agpl-3.0
|
onaio/kpi,kobotoolbox/kpi,onaio/kpi,onaio/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,onaio/kpi
|
Include management command missing from a080693
|
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from hub.models import ExtraUserDetail
from kpi.deployment_backends.kc_reader.utils import get_kc_profile_data
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--all-users',
action='store_true',
dest='all_users',
default=False,
help="Copy all users' profiles"),
make_option('--username',
action='store',
dest='username',
default=False,
help="Copy only a specific user's profiles"),
make_option('--again',
action='store_true',
dest='again',
default=False,
help='Usually, a KC profile is copied only once per user, '
'making it possible for the user to blank out a '
'field without having the old value from KC '
'reappear. To copy previously copied profiles again, '
'use this option'
),
)
def handle(self, *args, **options):
if options.get('all_users'):
users = User.objects.all()
elif options.get('username'):
users = User.objects.filter(username=options.get('username'))
else:
raise CommandError('No users selected!')
initial_count = users.count()
if not options.get('again'):
# Poor man's query within JSONField that saves a little time. We'll
# check `copied_kc_profile` again after parsing the JSON
users = users.exclude(
extra_details__data__contains='"copied_kc_profile":true')
copied_count = 0
for user in users:
extra_details, created = ExtraUserDetail.objects.get_or_create(
user=user)
if not extra_details.data.get('copied_kc_profile', False) or \
options.get('again'):
kc_detail = get_kc_profile_data(user.pk)
for k, v in kc_detail.iteritems():
if extra_details.data.get(k, None) is None:
extra_details.data[k] = v
extra_details.data['copied_kc_profile'] = True
copied_count += 1
extra_details.save()
self.stdout.write('Copied {} profile{}.'.format(
copied_count, '' if copied_count == 1 else 's'))
skipped_count = initial_count - copied_count
if skipped_count:
self.stdout.write('Skipped {} profile{}.'.format(
skipped_count, '' if skipped_count == 1 else 's'))
|
<commit_before><commit_msg>Include management command missing from a080693<commit_after>
|
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from hub.models import ExtraUserDetail
from kpi.deployment_backends.kc_reader.utils import get_kc_profile_data
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--all-users',
action='store_true',
dest='all_users',
default=False,
help="Copy all users' profiles"),
make_option('--username',
action='store',
dest='username',
default=False,
help="Copy only a specific user's profiles"),
make_option('--again',
action='store_true',
dest='again',
default=False,
help='Usually, a KC profile is copied only once per user, '
'making it possible for the user to blank out a '
'field without having the old value from KC '
'reappear. To copy previously copied profiles again, '
'use this option'
),
)
def handle(self, *args, **options):
if options.get('all_users'):
users = User.objects.all()
elif options.get('username'):
users = User.objects.filter(username=options.get('username'))
else:
raise CommandError('No users selected!')
initial_count = users.count()
if not options.get('again'):
# Poor man's query within JSONField that saves a little time. We'll
# check `copied_kc_profile` again after parsing the JSON
users = users.exclude(
extra_details__data__contains='"copied_kc_profile":true')
copied_count = 0
for user in users:
extra_details, created = ExtraUserDetail.objects.get_or_create(
user=user)
if not extra_details.data.get('copied_kc_profile', False) or \
options.get('again'):
kc_detail = get_kc_profile_data(user.pk)
for k, v in kc_detail.iteritems():
if extra_details.data.get(k, None) is None:
extra_details.data[k] = v
extra_details.data['copied_kc_profile'] = True
copied_count += 1
extra_details.save()
self.stdout.write('Copied {} profile{}.'.format(
copied_count, '' if copied_count == 1 else 's'))
skipped_count = initial_count - copied_count
if skipped_count:
self.stdout.write('Skipped {} profile{}.'.format(
skipped_count, '' if skipped_count == 1 else 's'))
|
Include management command missing from a080693from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from hub.models import ExtraUserDetail
from kpi.deployment_backends.kc_reader.utils import get_kc_profile_data
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--all-users',
action='store_true',
dest='all_users',
default=False,
help="Copy all users' profiles"),
make_option('--username',
action='store',
dest='username',
default=False,
help="Copy only a specific user's profiles"),
make_option('--again',
action='store_true',
dest='again',
default=False,
help='Usually, a KC profile is copied only once per user, '
'making it possible for the user to blank out a '
'field without having the old value from KC '
'reappear. To copy previously copied profiles again, '
'use this option'
),
)
def handle(self, *args, **options):
if options.get('all_users'):
users = User.objects.all()
elif options.get('username'):
users = User.objects.filter(username=options.get('username'))
else:
raise CommandError('No users selected!')
initial_count = users.count()
if not options.get('again'):
# Poor man's query within JSONField that saves a little time. We'll
# check `copied_kc_profile` again after parsing the JSON
users = users.exclude(
extra_details__data__contains='"copied_kc_profile":true')
copied_count = 0
for user in users:
extra_details, created = ExtraUserDetail.objects.get_or_create(
user=user)
if not extra_details.data.get('copied_kc_profile', False) or \
options.get('again'):
kc_detail = get_kc_profile_data(user.pk)
for k, v in kc_detail.iteritems():
if extra_details.data.get(k, None) is None:
extra_details.data[k] = v
extra_details.data['copied_kc_profile'] = True
copied_count += 1
extra_details.save()
self.stdout.write('Copied {} profile{}.'.format(
copied_count, '' if copied_count == 1 else 's'))
skipped_count = initial_count - copied_count
if skipped_count:
self.stdout.write('Skipped {} profile{}.'.format(
skipped_count, '' if skipped_count == 1 else 's'))
|
<commit_before><commit_msg>Include management command missing from a080693<commit_after>from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from hub.models import ExtraUserDetail
from kpi.deployment_backends.kc_reader.utils import get_kc_profile_data
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--all-users',
action='store_true',
dest='all_users',
default=False,
help="Copy all users' profiles"),
make_option('--username',
action='store',
dest='username',
default=False,
help="Copy only a specific user's profiles"),
make_option('--again',
action='store_true',
dest='again',
default=False,
help='Usually, a KC profile is copied only once per user, '
'making it possible for the user to blank out a '
'field without having the old value from KC '
'reappear. To copy previously copied profiles again, '
'use this option'
),
)
def handle(self, *args, **options):
if options.get('all_users'):
users = User.objects.all()
elif options.get('username'):
users = User.objects.filter(username=options.get('username'))
else:
raise CommandError('No users selected!')
initial_count = users.count()
if not options.get('again'):
# Poor man's query within JSONField that saves a little time. We'll
# check `copied_kc_profile` again after parsing the JSON
users = users.exclude(
extra_details__data__contains='"copied_kc_profile":true')
copied_count = 0
for user in users:
extra_details, created = ExtraUserDetail.objects.get_or_create(
user=user)
if not extra_details.data.get('copied_kc_profile', False) or \
options.get('again'):
kc_detail = get_kc_profile_data(user.pk)
for k, v in kc_detail.iteritems():
if extra_details.data.get(k, None) is None:
extra_details.data[k] = v
extra_details.data['copied_kc_profile'] = True
copied_count += 1
extra_details.save()
self.stdout.write('Copied {} profile{}.'.format(
copied_count, '' if copied_count == 1 else 's'))
skipped_count = initial_count - copied_count
if skipped_count:
self.stdout.write('Skipped {} profile{}.'.format(
skipped_count, '' if skipped_count == 1 else 's'))
|
|
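option_list with make_option is the pre-Django-1.8 optparse style; the argparse equivalent for newer Django would be (a sketch, not part of this commit):

def add_arguments(self, parser):
    # Same three options as the option_list above, argparse-style.
    parser.add_argument('--all-users', action='store_true',
                        dest='all_users', default=False,
                        help="Copy all users' profiles")
    parser.add_argument('--username', action='store', dest='username',
                        default=False,
                        help="Copy only a specific user's profile")
    parser.add_argument('--again', action='store_true', dest='again',
                        default=False,
                        help='Copy previously copied profiles again')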
1114f538fb19ac6e2d21849554bd0f1985420c60
|
whitenoise/django.py
|
whitenoise/django.py
|
raise ImportError(
'\n\n'
'Your WhiteNoise configuration is incompatible with WhiteNoise v4.0\n'
'This can be fixed by following the upgrade instructions at:\n'
'http://whitenoise.evans.io/en/stable/changelog.html#v4-0\n')
|
Raise with upgrade instructions on old import path
|
Raise with upgrade instructions on old import path
|
Python
|
mit
|
evansd/whitenoise,evansd/whitenoise,evansd/whitenoise
|
Raise with upgrade instructions on old import path
|
raise ImportError(
'\n\n'
'Your WhiteNoise configuration is incompatible with WhiteNoise v4.0\n'
'This can be fixed by following the upgrade instructions at:\n'
'http://whitenoise.evans.io/en/stable/changelog.html#v4-0\n')
|
<commit_before><commit_msg>Raise with upgrade instructions on old import path<commit_after>
|
raise ImportError(
'\n\n'
'Your WhiteNoise configuration is incompatible with WhiteNoise v4.0\n'
'This can be fixed by following the upgrade instructions at:\n'
'http://whitenoise.evans.io/en/stable/changelog.html#v4-0\n')
|
Raise with upgrade instructions on old import pathraise ImportError(
'\n\n'
'Your WhiteNoise configuration is incompatible with WhiteNoise v4.0\n'
'This can be fixed by following the upgrade instructions at:\n'
'http://whitenoise.evans.io/en/stable/changelog.html#v4-0\n')
|
<commit_before><commit_msg>Raise with upgrade instructions on old import path<commit_after>raise ImportError(
'\n\n'
'Your WhiteNoise configuration is incompatible with WhiteNoise v4.0\n'
'This can be fixed by following the upgrade instructions at:\n'
'http://whitenoise.evans.io/en/stable/changelog.html#v4-0\n')
|
|
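Because the raise sits at module level, it fires at import time, so any stale v3-era configuration fails fast with the upgrade pointer. A demonstration sketch:

# Importing the legacy module path surfaces the notice immediately.
try:
    import whitenoise.django  # noqa: F401  -- old import path
except ImportError as exc:
    print(exc)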
af95d8fc8355b9762af1001f2313d64fe837a4dc
|
run_story.py
|
run_story.py
|
#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
Implement a simple launcher for ZVM, to help test the machine.
|
Implement a simple launcher for ZVM, to help test the machine.
|
Python
|
bsd-3-clause
|
BGCX262/zvm-hg-to-git,BGCX262/zvm-hg-to-git
|
Implement a simple launcher for ZVM, to help test the machine.
|
#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Implement a simple launcher for ZVM, to help test the machine.<commit_after>
|
#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
Implement a simple launcher for ZVM, to help test the machine.#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Implement a simple launcher for ZVM, to help test the machine.<commit_after>#!/usr/bin/env python
import sys
import os.path
from zvm import zmachine
def usage():
print """Usage: %s <story file>
Run a Z-Machine story under ZVM.
""" % sys.argv[0]
sys.exit(1)
def main():
if len(sys.argv) != 2:
usage()
story_file = sys.argv[1]
if not os.path.isfile(story_file):
print "%s is not a file." % story_file
usage()
try:
f = file(story_file)
story_image = f.read()
f.close()
except IOError:
print "Error accessing %s" % story_file
sys.exit(1)
machine = zmachine.ZMachine(story_image)
machine.run()
if __name__ == '__main__':
main()
|
|
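The launcher is Python 2 (print statements, the file() builtin); a Python 3 rendering of the same logic, should it be needed (a sketch, not part of the commit):

#!/usr/bin/env python3
# Hedged Python 3 port of the launcher above; behaviour is unchanged.
import os.path
import sys

from zvm import zmachine


def main():
    if len(sys.argv) != 2:
        print("Usage: %s <story file>\nRun a Z-Machine story under ZVM."
              % sys.argv[0])
        sys.exit(1)
    story_file = sys.argv[1]
    if not os.path.isfile(story_file):
        print("%s is not a file." % story_file)
        sys.exit(1)
    try:
        with open(story_file, "rb") as f:  # story images are binary data
            story_image = f.read()
    except IOError:
        print("Error accessing %s" % story_file)
        sys.exit(1)
    zmachine.ZMachine(story_image).run()


if __name__ == "__main__":
    main()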
a6839580266deadb4a8b5c1f92f2cd2b58a7b42b
|
DataTag/management/commands/appendtag.py
|
DataTag/management/commands/appendtag.py
|
# -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2017 Rémi Duraffort
# This file is part of DataTag.
#
# DataTag is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DataTag is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with DataTag. If not, see <http://www.gnu.org/licenses/>
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Append the given tag to the given files"
def add_arguments(self, parser):
parser.add_argument("--tag", type=str)
parser.add_argument("files", nargs="+", type=str)
def handle(self, *args, **options):
with open(".DataTag.yaml", "a") as f_conf:
f_conf.write("- pattern: [%s]\n" % ", ".join(options["files"]))
f_conf.write(" tags:\n")
f_conf.write(" - %s\n" % options["tag"])
|
Add a command to append tags quickly
|
Add a command to append tags quickly
|
Python
|
agpl-3.0
|
ivoire/DataTag,ivoire/DataTag,ivoire/DataTag
|
Add a command to append tags quickly
|
# -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2017 Rémi Duraffort
# This file is part of DataTag.
#
# DataTag is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DataTag is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with DataTag. If not, see <http://www.gnu.org/licenses/>
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Append the given tag to the given files"
def add_arguments(self, parser):
parser.add_argument("--tag", type=str)
parser.add_argument("files", nargs="+", type=str)
def handle(self, *args, **options):
with open(".DataTag.yaml", "a") as f_conf:
f_conf.write("- pattern: [%s]\n" % ", ".join(options["files"]))
f_conf.write(" tags:\n")
f_conf.write(" - %s\n" % options["tag"])
|
<commit_before><commit_msg>Add a command to append tags quickly<commit_after>
|
# -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2017 Rémi Duraffort
# This file is part of DataTag.
#
# DataTag is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DataTag is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with DataTag. If not, see <http://www.gnu.org/licenses/>
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Append the given tag to the given files"
def add_arguments(self, parser):
parser.add_argument("--tag", type=str)
parser.add_argument("files", nargs="+", type=str)
def handle(self, *args, **options):
with open(".DataTag.yaml", "a") as f_conf:
f_conf.write("- pattern: [%s]\n" % ", ".join(options["files"]))
f_conf.write(" tags:\n")
f_conf.write(" - %s\n" % options["tag"])
|
Add a command to append tags quickly# -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2017 Rémi Duraffort
# This file is part of DataTag.
#
# DataTag is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DataTag is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with DataTag. If not, see <http://www.gnu.org/licenses/>
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Append the given tag to the given files"
def add_arguments(self, parser):
parser.add_argument("--tag", type=str)
parser.add_argument("files", nargs="+", type=str)
def handle(self, *args, **options):
with open(".DataTag.yaml", "a") as f_conf:
f_conf.write("- pattern: [%s]\n" % ", ".join(options["files"]))
f_conf.write(" tags:\n")
f_conf.write(" - %s\n" % options["tag"])
|
<commit_before><commit_msg>Add a command to append tags quickly<commit_after># -*- coding: utf-8 -*-
# vim: set ts=4
# Copyright 2017 Rémi Duraffort
# This file is part of DataTag.
#
# DataTag is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# DataTag is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with DataTag. If not, see <http://www.gnu.org/licenses/>
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Append the given tag to the given files"
def add_arguments(self, parser):
parser.add_argument("--tag", type=str)
parser.add_argument("files", nargs="+", type=str)
def handle(self, *args, **options):
with open(".DataTag.yaml", "a") as f_conf:
f_conf.write("- pattern: [%s]\n" % ", ".join(options["files"]))
f_conf.write(" tags:\n")
f_conf.write(" - %s\n" % options["tag"])
|
|
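A quick sketch of what the appendtag command above produces, assuming a hypothetical invocation of `python manage.py appendtag --tag holiday photo1.jpg photo2.jpg`; the format strings mirror the record, while the file names and tag are illustrative only.

files = ["photo1.jpg", "photo2.jpg"]  # hypothetical positional arguments
tag = "holiday"                       # hypothetical --tag value
fragment = "- pattern: [%s]\n tags:\n - %s\n" % (", ".join(files), tag)
print(fragment, end="")
# - pattern: [photo1.jpg, photo2.jpg]
#  tags:
#  - holiday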
7f5ebaa18bb6c0634ab1135a5915cd8ee593760d
|
tests/unit/test_hashes.py
|
tests/unit/test_hashes.py
|
# -*- coding: utf-8 -*-
# Import sorbic libs
import sorbic.db
# Import python libs
import os
import shutil
import unittest
import tempfile
try:
import libnacl.blake
HAS_BLAKE = True
except ImportError:
HAS_BLAKE = False
class TestDB(unittest.TestCase):
'''
'''
def _run_test(self, key_hash):
w_dir = tempfile.mkdtemp()
root = os.path.join(w_dir, 'db_root')
db = sorbic.db.DB(root, key_hash=key_hash)
data = {1:2}
db.insert('foo', data)
pull_data = db.get('foo')
self.assertEqual(data, pull_data)
shutil.rmtree(w_dir)
def test_blake(self):
if not HAS_BLAKE:
return
self._run_test('blake')
def test_algos(self):
# don't use hashlib.algorithms, need to support 2.6
for algo in ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'):
self._run_test(algo)
|
Add tests for multiple hashes
|
Add tests for multiple hashes
|
Python
|
apache-2.0
|
s0undt3ch/sorbic,thatch45/sorbic
|
Add tests for multiple hashes
|
# -*- coding: utf-8 -*-
# Import sorbic libs
import sorbic.db
# Import python libs
import os
import shutil
import unittest
import tempfile
try:
import libnacl.blake
HAS_BLAKE = True
except ImportError:
HAS_BLAKE = False
class TestDB(unittest.TestCase):
'''
'''
def _run_test(self, key_hash):
w_dir = tempfile.mkdtemp()
root = os.path.join(w_dir, 'db_root')
db = sorbic.db.DB(root, key_hash=key_hash)
data = {1:2}
db.insert('foo', data)
pull_data = db.get('foo')
self.assertEqual(data, pull_data)
shutil.rmtree(w_dir)
def test_blake(self):
if not HAS_BLAKE:
return
self._run_test('blake')
def test_algos(self):
# don't use hashlib.algorithms, need to support 2.6
for algo in ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'):
self._run_test(algo)
|
<commit_before><commit_msg>Add tests for multiple hashes<commit_after>
|
# -*- coding: utf-8 -*-
# Import sorbic libs
import sorbic.db
# Import python libs
import os
import shutil
import unittest
import tempfile
try:
import libnacl.blake
HAS_BLAKE = True
except ImportError:
HAS_BLAKE = False
class TestDB(unittest.TestCase):
'''
'''
def _run_test(self, key_hash):
w_dir = tempfile.mkdtemp()
root = os.path.join(w_dir, 'db_root')
db = sorbic.db.DB(root, key_hash=key_hash)
data = {1:2}
db.insert('foo', data)
pull_data = db.get('foo')
self.assertEqual(data, pull_data)
shutil.rmtree(w_dir)
def test_blake(self):
if not HAS_BLAKE:
return
self._run_test('blake')
def test_algos(self):
# don't use hashlib.algorithms, need to support 2.6
for algo in ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'):
self._run_test(algo)
|
Add tests for multiple hashes# -*- coding: utf-8 -*-
# Import sorbic libs
import sorbic.db
# Import python libs
import os
import shutil
import unittest
import tempfile
try:
import libnacl.blake
HAS_BLAKE = True
except ImportError:
HAS_BLAKE = False
class TestDB(unittest.TestCase):
'''
'''
def _run_test(self, key_hash):
w_dir = tempfile.mkdtemp()
root = os.path.join(w_dir, 'db_root')
db = sorbic.db.DB(root, key_hash=key_hash)
data = {1:2}
db.insert('foo', data)
pull_data = db.get('foo')
self.assertEqual(data, pull_data)
shutil.rmtree(w_dir)
def test_blake(self):
if not HAS_BLAKE:
return
self._run_test('blake')
def test_algos(self):
# don't use hashlib.algorithms, need to support 2.6
for algo in ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'):
self._run_test(algo)
|
<commit_before><commit_msg>Add tests for multiple hashes<commit_after># -*- coding: utf-8 -*-
# Import sorbic libs
import sorbic.db
# Import python libs
import os
import shutil
import unittest
import tempfile
try:
import libnacl.blake
HAS_BLAKE = True
except ImportError:
HAS_BLAKE = False
class TestDB(unittest.TestCase):
'''
'''
def _run_test(self, key_hash):
w_dir = tempfile.mkdtemp()
root = os.path.join(w_dir, 'db_root')
db = sorbic.db.DB(root, key_hash=key_hash)
data = {1:2}
db.insert('foo', data)
pull_data = db.get('foo')
self.assertEqual(data, pull_data)
shutil.rmtree(w_dir)
def test_blake(self):
if not HAS_BLAKE:
return
self._run_test('blake')
def test_algos(self):
# don't use hashlib.algorithms, need to support 2.6
for algo in ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512'):
self._run_test(algo)
|
|
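The test module above uses a common parametrization pattern for pre-2.7-compatible unittest: one private helper that takes the algorithm name, plus thin test methods that loop over it. A self-contained sketch of the same pattern against hashlib (hypothetical payload, no sorbic dependency):

import hashlib
import unittest

class TestDigests(unittest.TestCase):
    def _run_test(self, algo):
        # One helper, exercised once per algorithm name.
        digest = hashlib.new(algo, b"payload").hexdigest()
        self.assertTrue(digest)  # non-empty hex digest

    def test_algos(self):
        for algo in ('md5', 'sha1', 'sha256'):
            self._run_test(algo)

if __name__ == '__main__':
    unittest.main()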
b8655fb75c2ab7487086422a61cb9bba818be298
|
bin/converter.py
|
bin/converter.py
|
#!/usr/bin/env python
import argparse
import os.path
import pandas as pd
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert HDF5 to other '
'format')
parser.add_argument('filename', type=str, help='HDF5 filename')
parser.add_argument('format', type=str, choices=['html', 'xlsx'],
help='Format to which file will be converted')
parser.add_argument('-o', '--outdir', type=str, help='Output directory')
parser.add_argument('--hdf-key', type=str, default='df',
help='Identifier of data frame in HDF5 file')
args = parser.parse_args()
df = pd.read_hdf(args.filename, args.hdf_key)
filename, _ = os.path.splitext(os.path.basename(args.filename))
if args.outdir is not None:
outfile = os.path.join(args.outdir, filename)
if args.format == 'html':
df.to_html('{}.html'.format(outfile), float_format='{:0.3f}'.format)
else:
df.to_excel('{}.xlsx'.format(outfile), float_format='%0.3f',
engine='xlsxwriter')
|
Write script to convert HDF5 to other format
|
Write script to convert HDF5 to other format
|
Python
|
mit
|
kemskems/otdet
|
Write script to convert HDF5 to other format
|
#!/usr/bin/env python
import argparse
import os.path
import pandas as pd
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert HDF5 to other '
'format')
parser.add_argument('filename', type=str, help='HDF5 filename')
parser.add_argument('format', type=str, choices=['html', 'xlsx'],
help='Format to which file will be converted')
parser.add_argument('-o', '--outdir', type=str, help='Output directory')
parser.add_argument('--hdf-key', type=str, default='df',
help='Identifier of data frame in HDF5 file')
args = parser.parse_args()
df = pd.read_hdf(args.filename, args.hdf_key)
filename, _ = os.path.splitext(os.path.basename(args.filename))
if args.outdir is not None:
outfile = os.path.join(args.outdir, filename)
if args.format == 'html':
df.to_html('{}.html'.format(outfile), float_format='{:0.3f}'.format)
else:
df.to_excel('{}.xlsx'.format(outfile), float_format='%0.3f',
engine='xlsxwriter')
|
<commit_before><commit_msg>Write script to convert HDF5 to other format<commit_after>
|
#!/usr/bin/env python
import argparse
import os.path
import pandas as pd
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert HDF5 to other '
'format')
parser.add_argument('filename', type=str, help='HDF5 filename')
parser.add_argument('format', type=str, choices=['html', 'xlsx'],
help='Format to which file will be converted')
parser.add_argument('-o', '--outdir', type=str, help='Output directory')
parser.add_argument('--hdf-key', type=str, default='df',
help='Identifier of data frame in HDF5 file')
args = parser.parse_args()
df = pd.read_hdf(args.filename, args.hdf_key)
filename, _ = os.path.splitext(os.path.basename(args.filename))
if args.outdir is not None:
outfile = os.path.join(args.outdir, filename)
if args.format == 'html':
df.to_html('{}.html'.format(outfile), float_format='{:0.3f}'.format)
else:
df.to_excel('{}.xlsx'.format(outfile), float_format='%0.3f',
engine='xlsxwriter')
|
Write script to convert HDF5 to other format#!/usr/bin/env python
import argparse
import os.path
import pandas as pd
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert HDF5 to other '
'format')
parser.add_argument('filename', type=str, help='HDF5 filename')
parser.add_argument('format', type=str, choices=['html', 'xlsx'],
help='Format to which file will be converted')
parser.add_argument('-o', '--outdir', type=str, help='Output directory')
parser.add_argument('--hdf-key', type=str, default='df',
help='Identifier of data frame in HDF5 file')
args = parser.parse_args()
df = pd.read_hdf(args.filename, args.hdf_key)
filename, _ = os.path.splitext(os.path.basename(args.filename))
if args.outdir is not None:
outfile = os.path.join(args.outdir, filename)
if args.format == 'html':
df.to_html('{}.html'.format(outfile), float_format='{:0.3f}'.format)
else:
df.to_excel('{}.xlsx'.format(outfile), float_format='%0.3f',
engine='xlsxwriter')
|
<commit_before><commit_msg>Write script to convert HDF5 to other format<commit_after>#!/usr/bin/env python
import argparse
import os.path
import pandas as pd
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Convert HDF5 to other '
'format')
parser.add_argument('filename', type=str, help='HDF5 filename')
parser.add_argument('format', type=str, choices=['html', 'xlsx'],
help='Format to which file will be converted')
parser.add_argument('-o', '--outdir', type=str, help='Output directory')
parser.add_argument('--hdf-key', type=str, default='df',
help='Identifier of data frame in HDF5 file')
args = parser.parse_args()
df = pd.read_hdf(args.filename, args.hdf_key)
filename, _ = os.path.splitext(os.path.basename(args.filename))
if args.outdir is not None:
outfile = os.path.join(args.outdir, filename)
if args.format == 'html':
df.to_html('{}.html'.format(outfile), float_format='{:0.3f}'.format)
else:
df.to_excel('{}.xlsx'.format(outfile), float_format='%0.3f',
engine='xlsxwriter')
|
|
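A hedged usage sketch for the converter above. It assumes pandas with the PyTables backend is installed and that the script is run from the repository root; note that the script only assigns outfile inside the `if args.outdir is not None` branch, so in practice the -o/--outdir flag must always be passed.

import subprocess
import pandas as pd

# Build a tiny HDF5 input; the key matches the script's --hdf-key default.
pd.DataFrame({"a": [1.0, 2.0]}).to_hdf("demo.h5", key="df")
subprocess.run(["python", "bin/converter.py", "demo.h5", "html", "-o", "."],
               check=True)  # writes ./demo.html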
30843e3295d8a387b8fc61dea79ea47156266dad
|
build/transform-sql.py
|
build/transform-sql.py
|
#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
Add a helper script which will transform a file full of SQL commands into a header file suitable for inclusion in a standard C file and use with the Subversion SQLite APIs. The goal here is that we can maintain our SQL schema directly as such, and let this script do the appropriate transformation as part of autogen.sh.
|
Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@875425 13f79535-47bb-0310-9956-ffa450edef68
|
Python
|
apache-2.0
|
YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,wbond/subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion
|
Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@875425 13f79535-47bb-0310-9956-ffa450edef68
|
#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
<commit_before><commit_msg>Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@875425 13f79535-47bb-0310-9956-ffa450edef68<commit_after>
|
#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@875425 13f79535-47bb-0310-9956-ffa450edef68#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
<commit_before><commit_msg>Add a helper script which will transform a file full of SQL commands into a
header file suitable for inclusion in a standard C file and use with the
Subversion SQLite APIs. The goal here is that we can maintain our SQL schema
directly as such, and let this script do the appropriate transformation as
part of autogen.sh.
* build/transform-sql.py:
New.
git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@875425 13f79535-47bb-0310-9956-ffa450edef68<commit_after>#!/usr/bin/env python
#
# transform-sql.py -- create a header file with the appropriate SQL variables
# from an SQL file
#
import os
import re
import sys
def usage_and_exit(msg):
if msg:
sys.stderr.write("%s\n\n" % msg)
sys.stderr.write("usage: %s [sqlite_file]\n" % \
os.path.basename(sys.argv[0]))
sys.stderr.flush()
sys.exit(1)
def main(input_filename, output_filename):
input = open(input_filename, "r")
output = open(output_filename, "w")
var_name = os.path.basename(input_filename).replace('.', '_')
var_name = var_name.replace('-', '_')
output.write('static const char * const %s[] = { NULL,\n' % var_name)
in_comment = False
for line in input:
line = line.replace('\n', '')
line = line.replace('"', '\\"')
if line:
output.write(' "' + line + '"\n')
else:
output.write(' APR_EOL_STR\n')
output.write(' };')
input.close()
output.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
usage_and_exit("Incorrect number of arguments")
main(sys.argv[1], sys.argv[1] + ".h")
|
|
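A minimal sketch of the transform the script above performs, run inline on a two-statement SQL string instead of a file; the variable naming and APR_EOL_STR handling mirror the record, and the file name is hypothetical.

sql = 'CREATE TABLE t (id INTEGER);\n\nSELECT "x";'
var_name = "demo-schema.sql".replace('.', '_').replace('-', '_')
print('static const char * const %s[] = { NULL,' % var_name)
for line in sql.split('\n'):
    line = line.replace('"', '\\"')
    # Blank SQL lines become APR_EOL_STR entries in the generated header.
    print(' "%s"' % line if line else ' APR_EOL_STR')
print(' };')
# Emits a C array named demo_schema_sql, as the generated header would.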
69d2611a153733a7ff8f7e77df334469ecf56b01
|
src/utils/remove_uncessessary_rules.py
|
src/utils/remove_uncessessary_rules.py
|
import glob
import os
import re
def process(text, start=None):
# Full Definition
# ((.*) =\s+([\s\S]*?)\s*;\n)
# Group1 is the full rule definition
# Group2 is only the name of the rule
# Group3 is the definition of the rule
# Definitions within this definition
# (?<!')\b\w+\b(?!')
# Excludes keywords/terminals
# List of rule paths to check and leave. Any not needed will be removed
if start is None:
start = ['start', ]
rules_required = set(start)
rule_definition = re.compile(r"((.*) =\s+([\s\S]*?)\s*;\n)")
rules_in_definition = re.compile(r"(?<!')\b\w+\b(?!')")
rules_directory = {}
found = rule_definition.findall(text)
for _, name, definition in found:
rules_directory[name] = rules_in_definition.findall(definition)
found_new = True
while found_new:
found_new = False
for rule_name in list(rules_required):
if rules_directory.get(rule_name) is None:
continue
new = set(rules_directory.get(rule_name)) - rules_required
rules_required = rules_required | set(rules_directory[rule_name])
if new:
found_new = True
for full, name, _ in found:
# Exclude rules starting with underscore
if name.startswith('_'):
continue
if name not in rules_required:
text = text.replace('\n' + full + '\n', '\n')
return text
if __name__ == '__main__':
os.chdir("../")
for file in glob.glob("*.grako"):
print("Starting {file}".format(file=file))
with open(file) as f:
text = f.read()
text = process(text)
with open(file, 'w') as f:
f.write(text)
print("Finished {file}".format(file=file))
|
Add utility to remove un-needed rules
|
Add utility to remove un-needed rules
|
Python
|
mit
|
vmuriart/sqldef
|
Add utility to remove un-needed rules
|
import glob
import os
import re
def process(text, start=None):
# Full Definition
# ((.*) =\s+([\s\S]*?)\s*;\n)
# Group1 is the full rule definition
# Group2 is only the name of the rule
# Group3 is the definition of the rule
# Definitions within this definition
# (?<!')\b\w+\b(?!')
# Excludes keywords/terminals
# List of rule paths to check and leave. Any not needed will be removed
if start is None:
start = ['start', ]
rules_required = set(start)
rule_definition = re.compile(r"((.*) =\s+([\s\S]*?)\s*;\n)")
rules_in_definition = re.compile(r"(?<!')\b\w+\b(?!')")
rules_directory = {}
found = rule_definition.findall(text)
for _, name, definition in found:
rules_directory[name] = rules_in_definition.findall(definition)
found_new = True
while found_new:
found_new = False
for rule_name in list(rules_required):
if rules_directory.get(rule_name) is None:
continue
new = set(rules_directory.get(rule_name)) - rules_required
rules_required = rules_required | set(rules_directory[rule_name])
if new:
found_new = True
for full, name, _ in found:
# Exclude rules starting with underscore
if name.startswith('_'):
continue
if name not in rules_required:
text = text.replace('\n' + full + '\n', '\n')
return text
if __name__ == '__main__':
os.chdir("../")
for file in glob.glob("*.grako"):
print("Starting {file}".format(file=file))
with open(file) as f:
text = f.read()
text = process(text)
with open(file, 'w') as f:
f.write(text)
print("Finished {file}".format(file=file))
|
<commit_before><commit_msg>Add utility to remove un-needed rules<commit_after>
|
import glob
import os
import re
def process(text, start=None):
# Full Definition
# ((.*) =\s+([\s\S]*?)\s*;\n)
# Group1 is the full rule definition
# Group2 is only the name of the rule
# Group3 is the definition of the rule
# Definitions within this definition
# (?<!')\b\w+\b(?!')
# Excludes keywords/terminals
# List of rule paths to check and leave. Any not needed will be removed
if start is None:
start = ['start', ]
rules_required = set(start)
rule_definition = re.compile(r"((.*) =\s+([\s\S]*?)\s*;\n)")
rules_in_definition = re.compile(r"(?<!')\b\w+\b(?!')")
rules_directory = {}
found = rule_definition.findall(text)
for _, name, definition in found:
rules_directory[name] = rules_in_definition.findall(definition)
found_new = True
while found_new:
found_new = False
for rule_name in list(rules_required):
if rules_directory.get(rule_name) is None:
continue
new = set(rules_directory.get(rule_name)) - rules_required
rules_required = rules_required | set(rules_directory[rule_name])
if new:
found_new = True
for full, name, _ in found:
# Exclude rules starting with underscore
if name.startswith('_'):
continue
if name not in rules_required:
text = text.replace('\n' + full + '\n', '\n')
return text
if __name__ == '__main__':
os.chdir("../")
for file in glob.glob("*.grako"):
print("Starting {file}".format(file=file))
with open(file) as f:
text = f.read()
text = process(text)
with open(file, 'w') as f:
f.write(text)
print("Finished {file}".format(file=file))
|
Add utility to remove un-needed rulesimport glob
import os
import re
def process(text, start=None):
# Full Definition
# ((.*) =\s+([\s\S]*?)\s*;\n)
# Group1 is the full rule definition
# Group2 is only the name of the rule
# Group3 is the definition of the rule
# Definitions within this definition
# (?<!')\b\w+\b(?!')
# Excludes keywords/terminals
# List of rule paths to check and leave. Any not needed will be removed
if start is None:
start = ['start', ]
rules_required = set(start)
rule_definition = re.compile(r"((.*) =\s+([\s\S]*?)\s*;\n)")
rules_in_definition = re.compile(r"(?<!')\b\w+\b(?!')")
rules_directory = {}
found = rule_definition.findall(text)
for _, name, definition in found:
rules_directory[name] = rules_in_definition.findall(definition)
found_new = True
while found_new:
found_new = False
for rule_name in list(rules_required):
if rules_directory.get(rule_name) is None:
continue
new = set(rules_directory.get(rule_name)) - rules_required
rules_required = rules_required | set(rules_directory[rule_name])
if new:
found_new = True
for full, name, _ in found:
# Exclude rules starting with underscore
if name.startswith('_'):
continue
if name not in rules_required:
text = text.replace('\n' + full + '\n', '\n')
return text
if __name__ == '__main__':
os.chdir("../")
for file in glob.glob("*.grako"):
print("Starting {file}".format(file=file))
with open(file) as f:
text = f.read()
text = process(text)
with open(file, 'w') as f:
f.write(text)
print("Finished {file}".format(file=file))
|
<commit_before><commit_msg>Add utility to remove un-needed rules<commit_after>import glob
import os
import re
def process(text, start=None):
# Full Definition
# ((.*) =\s+([\s\S]*?)\s*;\n)
# Group1 is the full rule definition
# Group2 is only the name of the rule
# Group3 is the definition of the rule
# Definitions within this definition
# (?<!')\b\w+\b(?!')
# Excludes keywords/terminals
# List of rule paths to check and leave. Any not needed will be removed
if start is None:
start = ['start', ]
rules_required = set(start)
rule_definition = re.compile(r"((.*) =\s+([\s\S]*?)\s*;\n)")
rules_in_definition = re.compile(r"(?<!')\b\w+\b(?!')")
rules_directory = {}
found = rule_definition.findall(text)
for _, name, definition in found:
rules_directory[name] = rules_in_definition.findall(definition)
found_new = True
while found_new:
found_new = False
for rule_name in list(rules_required):
if rules_directory.get(rule_name) is None:
continue
new = set(rules_directory.get(rule_name)) - rules_required
rules_required = rules_required | set(rules_directory[rule_name])
if new:
found_new = True
for full, name, _ in found:
# Exclude rules starting with underscore
if name.startswith('_'):
continue
if name not in rules_required:
text = text.replace('\n' + full + '\n', '\n')
return text
if __name__ == '__main__':
os.chdir("../")
for file in glob.glob("*.grako"):
print("Starting {file}".format(file=file))
with open(file) as f:
text = f.read()
text = process(text)
with open(file, 'w') as f:
f.write(text)
print("Finished {file}".format(file=file))
|
|
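The pruner above is essentially a reachability pass over the grammar: keep every rule transitively referenced from 'start', drop the rest. A self-contained sketch of that idea on a hypothetical four-rule grammar:

rules = {"start": ["expr"], "expr": ["term"], "term": [], "orphan": ["term"]}
required, frontier = set(), ["start"]
while frontier:
    name = frontier.pop()
    if name in required:
        continue  # already visited
    required.add(name)
    frontier.extend(rules.get(name, []))
print(sorted(required))  # ['expr', 'start', 'term'] -- 'orphan' would be removed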
88d9ac54125aafe7a7551d6d23e4e2f2e77f8d33
|
Spacewalk/listKernelAPI.py
|
Spacewalk/listKernelAPI.py
|
'''
# ..######.
# .##....##
# .##......
# ..######.
# .......##
# .##....##
# ..######.
################################################################################
#
# Script: List of running kernel of spacewalk client
#
################################################################################
'''
from __future__ import print_function
from xmlrpc import client
from datetime import date
from datetime import timedelta
import re
SATELLITE_URL = "http://SRVLNXMGNT-01.logival.local/rpc/api"
webservice = client.Server(SATELLITE_URL, verbose=0)
key = webservice.auth.login("admin", "ktnTp4oOgzTmdcfOoe5v")
today = date.today()
yesterday = today - timedelta(days=1)
list = webservice.system.list_systems(key)
for system in list:
lastboot = webservice.system.get_details(key,system.get('id'))
currKern = webservice.system.get_running_kernel(key,system.get('id'))
matchObj = re.search(".*\'last_boot\'\: \<DateTime \'(.*?)\'\ at.*", str(lastboot))
print(currKern,system.get('name'),matchObj.group(1))
webservice.auth.logout(key)
|
Add report kernel with API for Spacewalk
|
Add report kernel with API for Spacewalk
|
Python
|
mit
|
seb2020/toolbox,seb2020/toolbox,seb2020/toolbox,seb2020/toolbox,seb2020/toolbox
|
Add report kernel with API for Spacewalk
|
'''
# ..######.
# .##....##
# .##......
# ..######.
# .......##
# .##....##
# ..######.
################################################################################
#
# Script: List of running kernel of spacewalk client
#
################################################################################
'''
from __future__ import print_function
from xmlrpc import client
from datetime import date
from datetime import timedelta
import re
SATELLITE_URL = "http://SRVLNXMGNT-01.logival.local/rpc/api"
webservice = client.Server(SATELLITE_URL, verbose=0)
key = webservice.auth.login("admin", "ktnTp4oOgzTmdcfOoe5v")
today = date.today()
yesterday = today - timedelta(days=1)
list = webservice.system.list_systems(key)
for system in list:
lastboot = webservice.system.get_details(key,system.get('id'))
currKern = webservice.system.get_running_kernel(key,system.get('id'))
matchObj = re.search(".*\'last_boot\'\: \<DateTime \'(.*?)\'\ at.*", str(lastboot))
print(currKern,system.get('name'),matchObj.group(1))
webservice.auth.logout(key)
|
<commit_before><commit_msg>Add report kernel with API for Spacewalk<commit_after>
|
'''
# ..######.
# .##....##
# .##......
# ..######.
# .......##
# .##....##
# ..######.
################################################################################
#
# Script: List of running kernel of spacewalk client
#
################################################################################
'''
from __future__ import print_function
from xmlrpc import client
from datetime import date
from datetime import timedelta
import re
SATELLITE_URL = "http://SRVLNXMGNT-01.logival.local/rpc/api"
webservice = client.Server(SATELLITE_URL, verbose=0)
key = webservice.auth.login("admin", "ktnTp4oOgzTmdcfOoe5v")
today = date.today()
yesterday = today - timedelta(days=1)
list = webservice.system.list_systems(key)
for system in list:
lastboot = webservice.system.get_details(key,system.get('id'))
currKern = webservice.system.get_running_kernel(key,system.get('id'))
matchObj = re.search(".*\'last_boot\'\: \<DateTime \'(.*?)\'\ at.*", str(lastboot))
print(currKern,system.get('name'),matchObj.group(1))
webservice.auth.logout(key)
|
Add report kernel with API for Spacewalk'''
# ..######.
# .##....##
# .##......
# ..######.
# .......##
# .##....##
# ..######.
################################################################################
#
# Script: List of running kernel of spacewalk client
#
################################################################################
'''
from __future__ import print_function
from xmlrpc import client
from datetime import date
from datetime import timedelta
import re
SATELLITE_URL = "http://SRVLNXMGNT-01.logival.local/rpc/api"
webservice = client.Server(SATELLITE_URL, verbose=0)
key = webservice.auth.login("admin", "ktnTp4oOgzTmdcfOoe5v")
today = date.today()
yesterday = today - timedelta(days=1)
list = webservice.system.list_systems(key)
for system in list:
lastboot = webservice.system.get_details(key,system.get('id'))
currKern = webservice.system.get_running_kernel(key,system.get('id'))
matchObj = re.search(".*\'last_boot\'\: \<DateTime \'(.*?)\'\ at.*", str(lastboot))
print(currKern,system.get('name'),matchObj.group(1))
webservice.auth.logout(key)
|
<commit_before><commit_msg>Add report kernel with API for Spacewalk<commit_after>'''
# ..######.
# .##....##
# .##......
# ..######.
# .......##
# .##....##
# ..######.
################################################################################
#
# Script: List of running kernel of spacewalk client
#
################################################################################
'''
from __future__ import print_function
from xmlrpc import client
from datetime import date
from datetime import timedelta
import re
SATELLITE_URL = "http://SRVLNXMGNT-01.logival.local/rpc/api"
webservice = client.Server(SATELLITE_URL, verbose=0)
key = webservice.auth.login("admin", "ktnTp4oOgzTmdcfOoe5v")
today = date.today()
yesterday = today - timedelta(days=1)
list = webservice.system.list_systems(key)
for system in list:
lastboot = webservice.system.get_details(key,system.get('id'))
currKern = webservice.system.get_running_kernel(key,system.get('id'))
matchObj = re.search(".*\'last_boot\'\: \<DateTime \'(.*?)\'\ at.*", str(lastboot))
print(currKern,system.get('name'),matchObj.group(1))
webservice.auth.logout(key)
|
|
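The report above recovers last_boot by regex-matching the repr() of the XML-RPC response, which is fragile; xmlrpc.client returns structs as plain dicts, so the field can be read directly. A hedged variant (host and credentials are placeholders, and the method names simply follow the record):

from xmlrpc import client

SATELLITE_URL = "http://satellite.example.com/rpc/api"  # placeholder host
webservice = client.Server(SATELLITE_URL, verbose=0)
key = webservice.auth.login("admin", "********")  # never hard-code real credentials
for system in webservice.system.list_systems(key):
    details = webservice.system.get_details(key, system.get('id'))
    print(system.get('name'), details.get('last_boot'))
webservice.auth.logout(key)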
db86c21577e50b30ee222c47aae43e49a574c007
|
test/requests/run-integration-tests.py
|
test/requests/run-integration-tests.py
|
import sys
from test_login_local import TestLoginLocal
from test_registration import TestRegistration
from unittest import TestSuite, TextTestRunner, TestLoader
test_cases = [
TestLoginLocal,
TestRegistration
]
def suite(gn2_url, es_url):
the_suite = TestSuite()
for case in test_cases:
the_suite.addTests(initTest(case, gn2_url, es_url))
return the_suite
def initTest(klass, gn2_url, es_url):
loader = TestLoader()
methodNames = loader.getTestCaseNames(klass)
return [klass(mname, gn2_url, es_url) for mname in methodNames]
def main(gn2_url, es_url):
runner = TextTestRunner()
runner.run(suite(gn2_url, es_url))
if __name__ == "__main__":
if len(sys.argv) < 3:
raise Exception("Required arguments missing:\n\tTry running `run-integration-test.py <gn2-url> <es-url>`")
else:
main(sys.argv[1], sys.argv[2])
|
Add a runner for all integration tests.
|
Add a runner for all integration tests.
|
Python
|
agpl-3.0
|
DannyArends/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2
|
Add a runner for all integration tests.
|
import sys
from test_login_local import TestLoginLocal
from test_registration import TestRegistration
from unittest import TestSuite, TextTestRunner, TestLoader
test_cases = [
TestLoginLocal,
TestRegistration
]
def suite(gn2_url, es_url):
the_suite = TestSuite()
for case in test_cases:
the_suite.addTests(initTest(case, gn2_url, es_url))
return the_suite
def initTest(klass, gn2_url, es_url):
loader = TestLoader()
methodNames = loader.getTestCaseNames(klass)
return [klass(mname, gn2_url, es_url) for mname in methodNames]
def main(gn2_url, es_url):
runner = TextTestRunner()
runner.run(suite(gn2_url, es_url))
if __name__ == "__main__":
if len(sys.argv) < 3:
raise Exception("Required arguments missing:\n\tTry running `run-integration-test.py <gn2-url> <es-url>`")
else:
main(sys.argv[1], sys.argv[2])
|
<commit_before><commit_msg>Add a runner for all integration tests.<commit_after>
|
import sys
from test_login_local import TestLoginLocal
from test_registration import TestRegistration
from unittest import TestSuite, TextTestRunner, TestLoader
test_cases = [
TestLoginLocal,
TestRegistration
]
def suite(gn2_url, es_url):
the_suite = TestSuite()
for case in test_cases:
the_suite.addTests(initTest(case, gn2_url, es_url))
return the_suite
def initTest(klass, gn2_url, es_url):
loader = TestLoader()
methodNames = loader.getTestCaseNames(klass)
return [klass(mname, gn2_url, es_url) for mname in methodNames]
def main(gn2_url, es_url):
runner = TextTestRunner()
runner.run(suite(gn2_url, es_url))
if __name__ == "__main__":
if len(sys.argv) < 3:
raise Exception("Required arguments missing:\n\tTry running `run-integration-test.py <gn2-url> <es-url>`")
else:
main(sys.argv[1], sys.argv[2])
|
Add a runner for all integration tests.import sys
from test_login_local import TestLoginLocal
from test_registration import TestRegistration
from unittest import TestSuite, TextTestRunner, TestLoader
test_cases = [
TestLoginLocal,
TestRegistration
]
def suite(gn2_url, es_url):
the_suite = TestSuite()
for case in test_cases:
the_suite.addTests(initTest(case, gn2_url, es_url))
return the_suite
def initTest(klass, gn2_url, es_url):
loader = TestLoader()
methodNames = loader.getTestCaseNames(klass)
return [klass(mname, gn2_url, es_url) for mname in methodNames]
def main(gn2_url, es_url):
runner = TextTestRunner()
runner.run(suite(gn2_url, es_url))
if __name__ == "__main__":
if len(sys.argv) < 3:
raise Exception("Required arguments missing:\n\tTry running `run-integration-test.py <gn2-url> <es-url>`")
else:
main(sys.argv[1], sys.argv[2])
|
<commit_before><commit_msg>Add a runner for all integration tests.<commit_after>import sys
from test_login_local import TestLoginLocal
from test_registration import TestRegistration
from unittest import TestSuite, TextTestRunner, TestLoader
test_cases = [
TestLoginLocal,
TestRegistration
]
def suite(gn2_url, es_url):
the_suite = TestSuite()
for case in test_cases:
the_suite.addTests(initTest(case, gn2_url, es_url))
return the_suite
def initTest(klass, gn2_url, es_url):
loader = TestLoader()
methodNames = loader.getTestCaseNames(klass)
return [klass(mname, gn2_url, es_url) for mname in methodNames]
def main(gn2_url, es_url):
runner = TextTestRunner()
runner.run(suite(gn2_url, es_url))
if __name__ == "__main__":
if len(sys.argv) < 3:
raise Exception("Required arguments missing:\n\tTry running `run-integration-test.py <gn2-url> <es-url>`")
else:
main(sys.argv[1], sys.argv[2])
|
|
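The runner above assumes every test case constructor accepts (methodName, gn2_url, es_url), which initTest relies on. A minimal compatible case, with hypothetical local URLs, shows the contract:

import unittest

class TestPing(unittest.TestCase):
    def __init__(self, methodName, gn2_url, es_url):
        # Forward only methodName to unittest; stash the URLs for the tests.
        super(TestPing, self).__init__(methodName)
        self.gn2_url, self.es_url = gn2_url, es_url

    def test_urls_present(self):
        self.assertTrue(self.gn2_url and self.es_url)

loader = unittest.TestLoader()
suite = unittest.TestSuite(
    TestPing(m, "http://localhost:5003", "http://localhost:9200")
    for m in loader.getTestCaseNames(TestPing))
unittest.TextTestRunner().run(suite)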
ce79a9fbbfcd26c7e6dee49643b10ef2ba8c953d
|
distutilazy/command/clean_jython_class.py
|
distutilazy/command/clean_jython_class.py
|
"""
distutilazy.command.clean_jython_class
--------------------------------------
Command to clean compiled .class files created
by Jython.
:license: MIT, see LICENSE for more details.
"""
from os.path import abspath, dirname
import sys
base_dir = abspath(dirname(dirname(dirname(__file__))))
if base_dir not in sys.path:
if len(sys.path):
sys.path.insert(1, base_dir)
else:
sys.path.append(base_dir)
import distutilazy.clean
class clean_jython_class(distutilazy.clean.CleanJythonClass):
pass
|
Add clean jython class command module
|
Add clean jython class command module
So it's possible to use this command when users add
distutilazy to their setup.cfg instead of
using command classes in their setup.py cmdclass.
|
Python
|
mit
|
farzadghanei/distutilazy
|
Add clean jython class command module
So it's possible to use this command when users add
distutilazy to their setup.cfg instead of
using command classes in their setup.py cmdclass.
|
"""
distutilazy.command.clean_jython_class
--------------------------------------
Command to clean compiled .class files created
by Jython.
:license: MIT, see LICENSE for more details.
"""
from os.path import abspath, dirname
import sys
base_dir = abspath(dirname(dirname(dirname(__file__))))
if base_dir not in sys.path:
if len(sys.path):
sys.path.insert(1, base_dir)
else:
sys.path.append(base_dir)
import distutilazy.clean
class clean_jython_class(distutilazy.clean.CleanJythonClass):
pass
|
<commit_before><commit_msg>Add clean jython class command module
So it's possible to use this command when users add
distutilazy to their setup.cfg instead of
using command classes in their setup.py cmdclass.<commit_after>
|
"""
distutilazy.command.clean_jython_class
--------------------------------------
Command to clean compiled .class files created
by Jython.
:license: MIT, see LICENSE for more details.
"""
from os.path import abspath, dirname
import sys
base_dir = abspath(dirname(dirname(dirname(__file__))))
if base_dir not in sys.path:
if len(sys.path):
sys.path.insert(1, base_dir)
else:
sys.path.append(base_dir)
import distutilazy.clean
class clean_jython_class(distutilazy.clean.CleanJythonClass):
pass
|
Add clean jython class command module
So it's possible to use this command when users add
distutilazy to their setup.cfg instead of
using command classes in their setup.py cmdclass."""
distutilazy.command.clean_jython_class
--------------------------------------
Command to clean compiled .class files created
by Jython.
:license: MIT, see LICENSE for more details.
"""
from os.path import abspath, dirname
import sys
base_dir = abspath(dirname(dirname(dirname(__file__))))
if base_dir not in sys.path:
if len(sys.path):
sys.path.insert(1, base_dir)
else:
sys.path.append(base_dir)
import distutilazy.clean
class clean_jython_class(distutilazy.clean.CleanJythonClass):
pass
|
<commit_before><commit_msg>Add clean jython class command module
So it's possible to use this command when users add
distutilazy to their setup.cfg instead of
using command classes in their setup.py cmdclass.<commit_after>"""
distutilazy.command.clean_jython_class
--------------------------------------
Command to clean compiled .class files created
by Jython.
:license: MIT, see LICENSE for more details.
"""
from os.path import abspath, dirname
import sys
base_dir = abspath(dirname(dirname(dirname(__file__))))
if base_dir not in sys.path:
if len(sys.path):
sys.path.insert(1, base_dir)
else:
sys.path.append(base_dir)
import distutilazy.clean
class clean_jython_class(distutilazy.clean.CleanJythonClass):
pass
|
|
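Per the commit message, the module exists so the command can be wired in through a project's setup.cfg instead of a cmdclass mapping in setup.py. A sketch of that wiring, assuming distutils' standard command_packages option:

; setup.cfg (sketch)
[global]
command_packages = distutilazy.command

; then run: python setup.py clean_jython_class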
97c1991b9d44863fadec1464ab47eca9b9f28d4b
|
mne/tests/test_line_endings.py
|
mne/tests/test_line_endings.py
|
# Adapted from vispy
#
# License: BSD (3-clause)
import os
from nose.plugins.skip import SkipTest
from os import path as op
import sys
import mne
from mne.utils import run_tests_if_main
known_crlf = (
'FreeSurferColorLUT.txt',
'test_edf_stim_channel.txt',
'FieldTrip.py',
)
def test_line_endings():
"""Test files in the repository for CR characters
"""
if sys.platform == 'win32':
raise SkipTest('Skipping line endings check on Windows')
sys.stdout.flush()
report = []
import_dir = mne.__path__[0]
for dirpath, dirnames, filenames in os.walk(import_dir):
for fname in filenames:
if op.splitext(fname)[1] in ('.pyc', '.pyo'):
continue
# Get filename
filename = op.join(dirpath, fname)
relfilename = op.relpath(filename, import_dir)
# Open and check
try:
with open(filename, 'rb') as fid:
text = fid.read().decode('utf-8')
except UnicodeDecodeError:
continue # Probably a binary file
crcount = text.count('\r')
if crcount and op.basename(fname) not in known_crlf:
lfcount = text.count('\n')
report.append('In %s found %i/%i CR/LF' %
(relfilename, crcount, lfcount))
# Process result
if len(report) > 0:
raise AssertionError('Found %s files with incorrect endings:\n%s'
% (len(report), '\n'.join(report)))
run_tests_if_main()
|
Add check for line endings
|
FIX: Add check for line endings
|
Python
|
bsd-3-clause
|
jniediek/mne-python,Teekuningas/mne-python,Teekuningas/mne-python,alexandrebarachant/mne-python,alexandrebarachant/mne-python,cmoutard/mne-python,kingjr/mne-python,rkmaddox/mne-python,Eric89GXL/mne-python,kambysese/mne-python,wmvanvliet/mne-python,Eric89GXL/mne-python,ARudiuk/mne-python,Teekuningas/mne-python,adykstra/mne-python,larsoner/mne-python,cjayb/mne-python,jniediek/mne-python,drammock/mne-python,wronk/mne-python,nicproulx/mne-python,larsoner/mne-python,pravsripad/mne-python,cjayb/mne-python,olafhauk/mne-python,larsoner/mne-python,ARudiuk/mne-python,adykstra/mne-python,drammock/mne-python,jaeilepp/mne-python,jmontoyam/mne-python,nicproulx/mne-python,cmoutard/mne-python,olafhauk/mne-python,pravsripad/mne-python,jmontoyam/mne-python,mne-tools/mne-python,jaeilepp/mne-python,rkmaddox/mne-python,wronk/mne-python,pravsripad/mne-python,bloyl/mne-python,wmvanvliet/mne-python,teonlamont/mne-python,bloyl/mne-python,mne-tools/mne-python,drammock/mne-python,teonlamont/mne-python,kingjr/mne-python,wmvanvliet/mne-python,kambysese/mne-python,mne-tools/mne-python,olafhauk/mne-python,kingjr/mne-python
|
FIX: Add check for line endings
|
# Adapted from vispy
#
# License: BSD (3-clause)
import os
from nose.plugins.skip import SkipTest
from os import path as op
import sys
import mne
from mne.utils import run_tests_if_main
known_crlf = (
'FreeSurferColorLUT.txt',
'test_edf_stim_channel.txt',
'FieldTrip.py',
)
def test_line_endings():
"""Test files in the repository for CR characters
"""
if sys.platform == 'win32':
raise SkipTest('Skipping line endings check on Windows')
sys.stdout.flush()
report = []
import_dir = mne.__path__[0]
for dirpath, dirnames, filenames in os.walk(import_dir):
for fname in filenames:
if op.splitext(fname)[1] in ('.pyc', '.pyo'):
continue
# Get filename
filename = op.join(dirpath, fname)
relfilename = op.relpath(filename, import_dir)
# Open and check
try:
with open(filename, 'rb') as fid:
text = fid.read().decode('utf-8')
except UnicodeDecodeError:
continue # Probably a binary file
crcount = text.count('\r')
if crcount and op.basename(fname) not in known_crlf:
lfcount = text.count('\n')
report.append('In %s found %i/%i CR/LF' %
(relfilename, crcount, lfcount))
# Process result
if len(report) > 0:
raise AssertionError('Found %s files with incorrect endings:\n%s'
% (len(report), '\n'.join(report)))
run_tests_if_main()
|
<commit_before><commit_msg>FIX: Add check for line endings<commit_after>
|
# Adapted from vispy
#
# License: BSD (3-clause)
import os
from nose.plugins.skip import SkipTest
from os import path as op
import sys
import mne
from mne.utils import run_tests_if_main
known_crlf = (
'FreeSurferColorLUT.txt',
'test_edf_stim_channel.txt',
'FieldTrip.py',
)
def test_line_endings():
"""Test files in the repository for CR characters
"""
if sys.platform == 'win32':
raise SkipTest('Skipping line endings check on Windows')
sys.stdout.flush()
report = []
import_dir = mne.__path__[0]
for dirpath, dirnames, filenames in os.walk(import_dir):
for fname in filenames:
if op.splitext(fname)[1] in ('.pyc', '.pyo'):
continue
# Get filename
filename = op.join(dirpath, fname)
relfilename = op.relpath(filename, import_dir)
# Open and check
try:
with open(filename, 'rb') as fid:
text = fid.read().decode('utf-8')
except UnicodeDecodeError:
continue # Probably a binary file
crcount = text.count('\r')
if crcount and op.basename(fname) not in known_crlf:
lfcount = text.count('\n')
report.append('In %s found %i/%i CR/LF' %
(relfilename, crcount, lfcount))
# Process result
if len(report) > 0:
raise AssertionError('Found %s files with incorrect endings:\n%s'
% (len(report), '\n'.join(report)))
run_tests_if_main()
|
FIX: Add check for line endings# Adapted from vispy
#
# License: BSD (3-clause)
import os
from nose.plugins.skip import SkipTest
from os import path as op
import sys
import mne
from mne.utils import run_tests_if_main
known_crlf = (
'FreeSurferColorLUT.txt',
'test_edf_stim_channel.txt',
'FieldTrip.py',
)
def test_line_endings():
"""Test files in the repository for CR characters
"""
if sys.platform == 'win32':
raise SkipTest('Skipping line endings check on Windows')
sys.stdout.flush()
report = []
import_dir = mne.__path__[0]
for dirpath, dirnames, filenames in os.walk(import_dir):
for fname in filenames:
if op.splitext(fname)[1] in ('.pyc', '.pyo'):
continue
# Get filename
filename = op.join(dirpath, fname)
relfilename = op.relpath(filename, import_dir)
# Open and check
try:
with open(filename, 'rb') as fid:
text = fid.read().decode('utf-8')
except UnicodeDecodeError:
continue # Probably a binary file
crcount = text.count('\r')
if crcount and op.basename(fname) not in known_crlf:
lfcount = text.count('\n')
report.append('In %s found %i/%i CR/LF' %
(relfilename, crcount, lfcount))
# Process result
if len(report) > 0:
raise AssertionError('Found %s files with incorrect endings:\n%s'
% (len(report), '\n'.join(report)))
run_tests_if_main()
|
<commit_before><commit_msg>FIX: Add check for line endings<commit_after># Adapted from vispy
#
# License: BSD (3-clause)
import os
from nose.plugins.skip import SkipTest
from os import path as op
import sys
import mne
from mne.utils import run_tests_if_main
known_crlf = (
'FreeSurferColorLUT.txt',
'test_edf_stim_channel.txt',
'FieldTrip.py',
)
def test_line_endings():
"""Test files in the repository for CR characters
"""
if sys.platform == 'win32':
raise SkipTest('Skipping line endings check on Windows')
sys.stdout.flush()
report = []
import_dir = mne.__path__[0]
for dirpath, dirnames, filenames in os.walk(import_dir):
for fname in filenames:
if op.splitext(fname)[1] in ('.pyc', '.pyo'):
continue
# Get filename
filename = op.join(dirpath, fname)
relfilename = op.relpath(filename, import_dir)
# Open and check
try:
with open(filename, 'rb') as fid:
text = fid.read().decode('utf-8')
except UnicodeDecodeError:
continue # Probably a binary file
crcount = text.count('\r')
if crcount and op.basename(fname) not in known_crlf:
lfcount = text.count('\n')
report.append('In %s found %i/%i CR/LF' %
(relfilename, crcount, lfcount))
# Process result
if len(report) > 0:
raise AssertionError('Found %s files with incorrect endings:\n%s'
% (len(report), '\n'.join(report)))
run_tests_if_main()
|
|
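The same CR scan works outside the nose harness; a standalone sketch pointed at a hypothetical src/ tree:

import os

for dirpath, _, filenames in os.walk("src"):  # hypothetical tree root
    for fname in filenames:
        path = os.path.join(dirpath, fname)
        try:
            with open(path, "rb") as fid:
                text = fid.read().decode("utf-8")
        except (UnicodeDecodeError, OSError):
            continue  # binary or unreadable file
        if "\r" in text:
            print("CR found in", path)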
aa9e3ae13e7d4a7ca8e72be8ab6d5ec7ea49f8e8
|
monasca_setup/detection/plugins/ironic.py
|
monasca_setup/detection/plugins/ironic.py
|
import logging
import monasca_setup.detection
log = logging.getLogger(__name__)
class Ironic(monasca_setup.detection.ServicePlugin):
"""Detect Ironic daemons and setup configuration to monitor them."""
def __init__(self, template_dir, overwrite=True, args=None):
service_api_url = "http://localhost:6385"
if isinstance(args, str):
try:
# Turn 'service_api_url=url' into
# dict {'service_api_url':'url'}
args_dict = dict([item.split('=') for item
in args.split()])
if "service_api_url" in args_dict:
service_api_url = args_dict['service_api_url']
except Exception:
log.exception('Error parsing detection arguments')
service_params = {
'args': args,
'template_dir': template_dir,
'overwrite': overwrite,
'service_name': 'Baremetal',
'process_names': ['ironic-api', 'ironic-conductor'],
'service_api_url': service_api_url,
'search_pattern': '.*200 OK.*',
}
super(Ironic, self).__init__(service_params)
|
Add Ironic Service detection plugin
|
Add Ironic Service detection plugin
Change-Id: I829988eeeb53ffa2f8a21f25fcce83f7ff92143b
|
Python
|
bsd-3-clause
|
sapcc/monasca-agent,sapcc/monasca-agent,sapcc/monasca-agent
|
Add Ironic Service detection plugin
Change-Id: I829988eeeb53ffa2f8a21f25fcce83f7ff92143b
|
import logging
import monasca_setup.detection
log = logging.getLogger(__name__)
class Ironic(monasca_setup.detection.ServicePlugin):
"""Detect Ironic daemons and setup configuration to monitor them."""
def __init__(self, template_dir, overwrite=True, args=None):
service_api_url = "http://localhost:6385"
if isinstance(args, str):
try:
# Turn 'service_api_url=url' into
# dict {'service_api_url':'url'}
args_dict = dict([item.split('=') for item
in args.split()])
if "service_api_url" in args_dict:
service_api_url = args_dict['service_api_url']
except Exception:
log.exception('Error parsing detection arguments')
service_params = {
'args': args,
'template_dir': template_dir,
'overwrite': overwrite,
'service_name': 'Baremetal',
'process_names': ['ironic-api', 'ironic-conductor'],
'service_api_url': service_api_url,
'search_pattern': '.*200 OK.*',
}
super(Ironic, self).__init__(service_params)
|
<commit_before><commit_msg>Add Ironic Service detection plugin
Change-Id: I829988eeeb53ffa2f8a21f25fcce83f7ff92143b<commit_after>
|
import logging
import monasca_setup.detection
log = logging.getLogger(__name__)
class Ironic(monasca_setup.detection.ServicePlugin):
"""Detect Ironic daemons and setup configuration to monitor them."""
def __init__(self, template_dir, overwrite=True, args=None):
service_api_url = "http://localhost:6385"
if isinstance(args, str):
try:
# Turn 'service_api_url=url' into
# dict {'service_api_url':'url'}
args_dict = dict([item.split('=') for item
in args.split()])
if "service_api_url" in args_dict:
service_api_url = args_dict['service_api_url']
except Exception:
log.exception('Error parsing detection arguments')
service_params = {
'args': args,
'template_dir': template_dir,
'overwrite': overwrite,
'service_name': 'Baremetal',
'process_names': ['ironic-api', 'ironic-conductor'],
'service_api_url': service_api_url,
'search_pattern': '.*200 OK.*',
}
super(Ironic, self).__init__(service_params)
|
Add Ironic Service detection plugin
Change-Id: I829988eeeb53ffa2f8a21f25fcce83f7ff92143bimport logging
import monasca_setup.detection
log = logging.getLogger(__name__)
class Ironic(monasca_setup.detection.ServicePlugin):
"""Detect Ironic daemons and setup configuration to monitor them."""
def __init__(self, template_dir, overwrite=True, args=None):
service_api_url = "http://localhost:6385"
if isinstance(args, str):
try:
# Turn 'service_api_url=url' into
# dict {'service_api_url':'url'}
args_dict = dict([item.split('=') for item
in args.split()])
if "service_api_url" in args_dict:
service_api_url = args_dict['service_api_url']
except Exception:
log.exception('Error parsing detection arguments')
service_params = {
'args': args,
'template_dir': template_dir,
'overwrite': overwrite,
'service_name': 'Baremetal',
'process_names': ['ironic-api', 'ironic-conductor'],
'service_api_url': service_api_url,
'search_pattern': '.*200 OK.*',
}
super(Ironic, self).__init__(service_params)
|
<commit_before><commit_msg>Add Ironic Service detection plugin
Change-Id: I829988eeeb53ffa2f8a21f25fcce83f7ff92143b<commit_after>import logging
import monasca_setup.detection
log = logging.getLogger(__name__)
class Ironic(monasca_setup.detection.ServicePlugin):
"""Detect Ironic daemons and setup configuration to monitor them."""
def __init__(self, template_dir, overwrite=True, args=None):
service_api_url = "http://localhost:6385"
if isinstance(args, str):
try:
# Turn 'service_api_url=url' into
# dict {'service_api_url':'url'}
args_dict = dict([item.split('=') for item
in args.split()])
if "service_api_url" in args_dict:
service_api_url = args_dict['service_api_url']
except Exception:
log.exception('Error parsing detection arguments')
service_params = {
'args': args,
'template_dir': template_dir,
'overwrite': overwrite,
'service_name': 'Baremetal',
'process_names': ['ironic-api', 'ironic-conductor'],
'service_api_url': service_api_url,
'search_pattern': '.*200 OK.*',
}
super(Ironic, self).__init__(service_params)
|
|
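The plugin above parses its detection arguments from a space-separated 'key=value' string. A sketch of the idiom with a hypothetical argument string; split('=', 1) is used here so values containing '=' survive, which the record's plain split('=') would not:

args = "service_api_url=http://127.0.0.1:6385"  # hypothetical detection args
args_dict = dict(item.split('=', 1) for item in args.split())
print(args_dict.get('service_api_url', "http://localhost:6385"))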
11853bead5a47d0b15877eb5e5968b91708bb223
|
API/chat/models.py
|
API/chat/models.py
|
from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
return {
'text': self.text,
'datetime_start': self.datetime_start,
'datetime_sent': getattr(self, 'datetime_sent', False),
'username': self.username
}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
|
from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
serializable_fields = ('text', 'datetime_start', 'datetime_sent', 'username')
return {key: getattr(self, key) for key in serializable_fields}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
|
Refactor to_dict method on the Message model
|
Refactor to_dict method on the Message model
|
Python
|
mit
|
odyvarv/ting-1,VitSalis/ting,odyvarv/ting-1,VitSalis/ting,mbalamat/ting,dionyziz/ting,gtklocker/ting,sirodoht/ting,mbalamat/ting,dionyziz/ting,VitSalis/ting,odyvarv/ting-1,mbalamat/ting,mbalamat/ting,VitSalis/ting,gtklocker/ting,odyvarv/ting-1,dionyziz/ting,sirodoht/ting,gtklocker/ting,gtklocker/ting,sirodoht/ting,sirodoht/ting,dionyziz/ting
|
from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
return {
'text': self.text,
'datetime_start': self.datetime_start,
'datetime_sent': getattr(self, 'datetime_sent', False),
'username': self.username
}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
Refactor to_dict method on the Message model
|
from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
serializable_fields = ('text', 'datetime_start', 'datetime_sent', 'username')
return {key: getattr(self, key) for key in serializable_fields}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
|
<commit_before>from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
return {
'text': self.text,
'datetime_start': self.datetime_start,
'datetime_sent': getattr(self, 'datetime_sent', False),
'username': self.username
}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
<commit_msg>Refactor to_dict method on the Message model<commit_after>
|
from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
serializable_fields = ('text', 'datetime_start', 'datetime_sent', 'username')
return {key: getattr(self, key) for key in serializable_fields}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
|
from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
return {
'text': self.text,
'datetime_start': self.datetime_start,
'datetime_sent': getattr(self, 'datetime_sent', False),
'username': self.username
}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
Refactor to_dict method on the Message model
from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
serializable_fields = ('text', 'datetime_start', 'datetime_sent', 'username')
return {key: getattr(self, key) for key in serializable_fields}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
|
<commit_before>from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
return {
'text': self.text,
'datetime_start': self.datetime_start,
'datetime_sent': getattr(self, 'datetime_sent', False),
'username': self.username
}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
<commit_msg>Refactor to_dict method on the Message model<commit_after>from django.db import models
class Channel(models.Model):
def __str__(self):
return self.name
name = models.CharField(max_length=20, unique=True)
class Message(models.Model):
def __str__(self):
return self.text
def to_dict(self):
serializable_fields = ('text', 'datetime_start', 'datetime_sent', 'username')
return {key: getattr(self, key) for key in serializable_fields}
text = models.TextField(max_length=2000)
datetime_start = models.DateTimeField(default=None)
datetime_sent = models.DateTimeField(default=None, null=True)
typing = models.BooleanField(default=False)
username = models.CharField(max_length=20)
channel = models.ForeignKey(Channel)
|
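A self-contained sketch of the comprehension pattern the refactor adopts (plain class, not a Django model; field names are illustrative). One behavioural nuance: the old code defaulted datetime_sent to False via getattr, while the comprehension assumes every listed field exists on the instance.

class Note(object):
    def __init__(self, text, username):
        self.text = text
        self.username = username

    def to_dict(self):
        # Build the dict from a fixed tuple of attribute names.
        serializable_fields = ('text', 'username')
        return {key: getattr(self, key) for key in serializable_fields}

print(Note('hello', 'alice').to_dict())  # {'text': 'hello', 'username': 'alice'}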
975fc40b7055db0834713ffd91033bc8fac5d007
|
emotools/heem_utils.py
|
emotools/heem_utils.py
|
heem_concept_type_labels = ['Emotie', 'Lichaamswerking', 'Lichaamsdeel',
'EmotioneleHandeling']
heem_emotion_labels = ['Achterdocht', 'Angst', 'Bedruktheid',
'Beledigd', 'Berusting', 'Bezorgdheid', 'Blijdschap',
'Eergevoel', 'Geluk', 'Gemis', 'Haat', 'Hebzucht',
'Hoop', 'Jaloezie', 'Liefde', 'Mededogen', 'Ongeluk',
'Ontroering', 'Ontzag', 'Opluchting', 'Overig',
'Schaamte', 'Teleurstelling', 'Toewijding', 'Trots',
'Trouw', 'Verdriet', 'Verlangen', 'Vertrouwen',
'Verwondering', 'Walging', 'Wanhoop', 'Welwillendheid',
'Woede', 'Wraakzucht', 'Wrevel', 'Wroeging', 'Wrok']
heem_body_part_labels = ["Heart", "Longs", "Jaws", "Ears", "Mind", "Head",
"Arms", "Hair", "Bones", "Lips", "Horns",
"Throat", "Tooth", "Conscience", "Knees", "Limbs",
"Breath", "Liver", "Phlegm", "Wound", "Fingers",
"Snot", "Forehead", "Tears", "Legs", "Cheeks",
"Body", "Eyes", "Senses", "Stomach", "Wrists",
"Nails", "Mouth", "Breast", "Hands", "Marrow",
"Varia", "Spirit", "Intestines", "Shoulder",
"Feet", "Bile", "Voice", "Neck", "Skull", "Brain",
"Soul", "Face", "Sweat", "Shins", "Veins",
"Tongue", "Womb", "Blood"]
|
Add heem label sets as importable variables
|
Add heem label sets as importable variables
Lists of HEEM labels have been added as variables that can be imported
by other modules and used to print the labels in fixed orders.
|
Python
|
apache-2.0
|
NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts
|
Add heem label sets as importable variables
Lists of HEEM labels have been added as variables that can be imported
by other modules and used to print the labels in fixed orders.
|
heem_concept_type_labels = ['Emotie', 'Lichaamswerking', 'Lichaamsdeel',
'EmotioneleHandeling']
heem_emotion_labels = ['Achterdocht', 'Angst', 'Bedruktheid',
'Beledigd', 'Berusting', 'Bezorgdheid', 'Blijdschap',
'Eergevoel', 'Geluk', 'Gemis', 'Haat', 'Hebzucht',
'Hoop', 'Jaloezie', 'Liefde', 'Mededogen', 'Ongeluk',
'Ontroering', 'Ontzag', 'Opluchting', 'Overig',
'Schaamte', 'Teleurstelling', 'Toewijding', 'Trots',
'Trouw', 'Verdriet', 'Verlangen', 'Vertrouwen',
'Verwondering', 'Walging', 'Wanhoop', 'Welwillendheid',
'Woede', 'Wraakzucht', 'Wrevel', 'Wroeging', 'Wrok']
heem_body_part_labels = ["Heart", "Longs", "Jaws", "Ears", "Mind", "Head",
"Arms", "Hair", "Bones", "Lips", "Horns",
"Throat", "Tooth", "Conscience", "Knees", "Limbs",
"Breath", "Liver", "Phlegm", "Wound", "Fingers",
"Snot", "Forehead", "Tears", "Legs", "Cheeks",
"Body", "Eyes", "Senses", "Stomach", "Wrists",
"Nails", "Mouth", "Breast", "Hands", "Marrow",
"Varia", "Spirit", "Intestines", "Shoulder",
"Feet", "Bile", "Voice", "Neck", "Skull", "Brain",
"Soul", "Face", "Sweat", "Shins", "Veins",
"Tongue", "Womb", "Blood"]
|
<commit_before><commit_msg>Add heem label sets as importable variables
Lists of HEEM labels have been added as variables that can be imported
by other modules and used to print the labels in fixed orders.<commit_after>
|
heem_concept_type_labels = ['Emotie', 'Lichaamswerking', 'Lichaamsdeel',
'EmotioneleHandeling']
heem_emotion_labels = ['Achterdocht', 'Angst', 'Bedruktheid',
'Beledigd', 'Berusting', 'Bezorgdheid', 'Blijdschap',
'Eergevoel', 'Geluk', 'Gemis', 'Haat', 'Hebzucht',
'Hoop', 'Jaloezie', 'Liefde', 'Mededogen', 'Ongeluk',
'Ontroering', 'Ontzag', 'Opluchting', 'Overig',
'Schaamte', 'Teleurstelling', 'Toewijding', 'Trots',
'Trouw', 'Verdriet', 'Verlangen', 'Vertrouwen',
'Verwondering', 'Walging', 'Wanhoop', 'Welwillendheid',
'Woede', 'Wraakzucht', 'Wrevel', 'Wroeging', 'Wrok']
heem_body_part_labels = ["Heart", "Longs", "Jaws", "Ears", "Mind", "Head",
"Arms", "Hair", "Bones", "Lips", "Horns",
"Throat", "Tooth", "Conscience", "Knees", "Limbs",
"Breath", "Liver", "Phlegm", "Wound", "Fingers",
"Snot", "Forehead", "Tears", "Legs", "Cheeks",
"Body", "Eyes", "Senses", "Stomach", "Wrists",
"Nails", "Mouth", "Breast", "Hands", "Marrow",
"Varia", "Spirit", "Intestines", "Shoulder",
"Feet", "Bile", "Voice", "Neck", "Skull", "Brain",
"Soul", "Face", "Sweat", "Shins", "Veins",
"Tongue", "Womb", "Blood"]
|
Add heem label sets as importable variables
Lists of HEEM labels have been added as variables that can be imported
by other modules and used to print the labels in fixed orders.
heem_concept_type_labels = ['Emotie', 'Lichaamswerking', 'Lichaamsdeel',
'EmotioneleHandeling']
heem_emotion_labels = ['Achterdocht', 'Angst', 'Bedruktheid',
'Beledigd', 'Berusting', 'Bezorgdheid', 'Blijdschap',
'Eergevoel', 'Geluk', 'Gemis', 'Haat', 'Hebzucht',
'Hoop', 'Jaloezie', 'Liefde', 'Mededogen', 'Ongeluk',
'Ontroering', 'Ontzag', 'Opluchting', 'Overig',
'Schaamte', 'Teleurstelling', 'Toewijding', 'Trots',
'Trouw', 'Verdriet', 'Verlangen', 'Vertrouwen',
'Verwondering', 'Walging', 'Wanhoop', 'Welwillendheid',
'Woede', 'Wraakzucht', 'Wrevel', 'Wroeging', 'Wrok']
heem_body_part_labels = ["Heart", "Longs", "Jaws", "Ears", "Mind", "Head",
"Arms", "Hair", "Bones", "Lips", "Horns",
"Throat", "Tooth", "Conscience", "Knees", "Limbs",
"Breath", "Liver", "Phlegm", "Wound", "Fingers",
"Snot", "Forehead", "Tears", "Legs", "Cheeks",
"Body", "Eyes", "Senses", "Stomach", "Wrists",
"Nails", "Mouth", "Breast", "Hands", "Marrow",
"Varia", "Spirit", "Intestines", "Shoulder",
"Feet", "Bile", "Voice", "Neck", "Skull", "Brain",
"Soul", "Face", "Sweat", "Shins", "Veins",
"Tongue", "Womb", "Blood"]
|
<commit_before><commit_msg>Add heem label sets as importable variables
Lists of HEEM labels have been added as variables that can be imported
by other modules and used to print the labels in fixed orders.<commit_after>heem_concept_type_labels = ['Emotie', 'Lichaamswerking', 'Lichaamsdeel',
'EmotioneleHandeling']
heem_emotion_labels = ['Achterdocht', 'Angst', 'Bedruktheid',
'Beledigd', 'Berusting', 'Bezorgdheid', 'Blijdschap',
'Eergevoel', 'Geluk', 'Gemis', 'Haat', 'Hebzucht',
'Hoop', 'Jaloezie', 'Liefde', 'Mededogen', 'Ongeluk',
'Ontroering', 'Ontzag', 'Opluchting', 'Overig',
'Schaamte', 'Teleurstelling', 'Toewijding', 'Trots',
'Trouw', 'Verdriet', 'Verlangen', 'Vertrouwen',
'Verwondering', 'Walging', 'Wanhoop', 'Welwillendheid',
'Woede', 'Wraakzucht', 'Wrevel', 'Wroeging', 'Wrok']
heem_body_part_labels = ["Heart", "Longs", "Jaws", "Ears", "Mind", "Head",
"Arms", "Hair", "Bones", "Lips", "Horns",
"Throat", "Tooth", "Conscience", "Knees", "Limbs",
"Breath", "Liver", "Phlegm", "Wound", "Fingers",
"Snot", "Forehead", "Tears", "Legs", "Cheeks",
"Body", "Eyes", "Senses", "Stomach", "Wrists",
"Nails", "Mouth", "Breast", "Hands", "Marrow",
"Varia", "Spirit", "Intestines", "Shoulder",
"Feet", "Bile", "Voice", "Neck", "Skull", "Brain",
"Soul", "Face", "Sweat", "Shins", "Veins",
"Tongue", "Womb", "Blood"]
|
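A minimal usage sketch for the variables above, assuming the package layout from the file header (emotools/heem_utils.py) is importable:

from emotools.heem_utils import heem_concept_type_labels, heem_emotion_labels

# The lists keep their declaration order, so iterating prints the labels in a fixed order.
for label in heem_concept_type_labels + heem_emotion_labels:
    print(label)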
|
dfadbe61ae26fd5d89fdcc599277e3df1d573b07
|
examples/basic_auth/app.py
|
examples/basic_auth/app.py
|
from flask import Flask, g, request, session, redirect, url_for
from flask.ext.simpleldap import LDAP
app = Flask(__name__)
app.secret_key = 'dev key'
app.debug = True
app.config['LDAP_HOST'] = 'ldap.example.org'
app.config['LDAP_BASE_DN'] = 'OU=users,dc=example,dc=org'
app.config['LDAP_USERNAME'] = 'CN=user,OU=Users,DC=example,DC=org'
app.config['LDAP_PASSWORD'] = 'password'
ldap = LDAP(app)
@app.route('/')
@ldap.basic_auth_required
def index():
return "Welcome, {0}!".format(g.ldap_username)
if __name__ == '__main__':
app.run()
|
Add an example of using .basic_auth_required()
|
Add an example of using .basic_auth_required()
|
Python
|
mit
|
admiralobvious/flask-simpleldap
|
Add an example of using .basic_auth_required()
|
from flask import Flask, g, request, session, redirect, url_for
from flask.ext.simpleldap import LDAP
app = Flask(__name__)
app.secret_key = 'dev key'
app.debug = True
app.config['LDAP_HOST'] = 'ldap.example.org'
app.config['LDAP_BASE_DN'] = 'OU=users,dc=example,dc=org'
app.config['LDAP_USERNAME'] = 'CN=user,OU=Users,DC=example,DC=org'
app.config['LDAP_PASSWORD'] = 'password'
ldap = LDAP(app)
@app.route('/')
@ldap.basic_auth_required
def index():
return "Welcome, {0}!".format(g.ldap_username)
if __name__ == '__main__':
app.run()
|
<commit_before><commit_msg>Add an example of using .basic_auth_required()<commit_after>
|
from flask import Flask, g, request, session, redirect, url_for
from flask.ext.simpleldap import LDAP
app = Flask(__name__)
app.secret_key = 'dev key'
app.debug = True
app.config['LDAP_HOST'] = 'ldap.example.org'
app.config['LDAP_BASE_DN'] = 'OU=users,dc=example,dc=org'
app.config['LDAP_USERNAME'] = 'CN=user,OU=Users,DC=example,DC=org'
app.config['LDAP_PASSWORD'] = 'password'
ldap = LDAP(app)
@app.route('/')
@ldap.basic_auth_required
def index():
return "Welcome, {0}!".format(g.ldap_username)
if __name__ == '__main__':
app.run()
|
Add an example of using .basic_auth_required()
from flask import Flask, g, request, session, redirect, url_for
from flask.ext.simpleldap import LDAP
app = Flask(__name__)
app.secret_key = 'dev key'
app.debug = True
app.config['LDAP_HOST'] = 'ldap.example.org'
app.config['LDAP_BASE_DN'] = 'OU=users,dc=example,dc=org'
app.config['LDAP_USERNAME'] = 'CN=user,OU=Users,DC=example,DC=org'
app.config['LDAP_PASSWORD'] = 'password'
ldap = LDAP(app)
@app.route('/')
@ldap.basic_auth_required
def index():
return "Welcome, {0}!".format(g.ldap_username)
if __name__ == '__main__':
app.run()
|
<commit_before><commit_msg>Add an example of using .basic_auth_required()<commit_after>from flask import Flask, g, request, session, redirect, url_for
from flask.ext.simpleldap import LDAP
app = Flask(__name__)
app.secret_key = 'dev key'
app.debug = True
app.config['LDAP_HOST'] = 'ldap.example.org'
app.config['LDAP_BASE_DN'] = 'OU=users,dc=example,dc=org'
app.config['LDAP_USERNAME'] = 'CN=user,OU=Users,DC=example,DC=org'
app.config['LDAP_PASSWORD'] = 'password'
ldap = LDAP(app)
@app.route('/')
@ldap.basic_auth_required
def index():
return "Welcome, {0}!".format(g.ldap_username)
if __name__ == '__main__':
app.run()
|
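A hedged client-side sketch exercising the endpoint; the host/port and the credentials are assumptions (any account the LDAP server accepts should work):

import requests

resp = requests.get('http://localhost:5000/', auth=('someuser', 'somepass'))
print(resp.status_code, resp.text)  # 200 and 'Welcome, someuser!' on valid credentials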
|
d3c4c17b5f999ab68401c954d30077c8b48b9b36
|
data/nh2010/extract_sparse_matrix.py
|
data/nh2010/extract_sparse_matrix.py
|
from __future__ import print_function
import sys
# Given a matrix in Matrix Market format, converts it into the following format:
# Line 1: # of rows
# Line 2..n: Row# col1_# col1_weight col2_# col2_weight ... coln_# coln_weight
input_file = sys.argv[1]
output_file = sys.argv[2]
f = open(input_file)
f_out = open(output_file, "w")
node_id, node_count = {}, 0
m = {}
for line in f:
if line[0] == "%":
continue
c1, c2, weight = [int(x) for x in line.strip().split()]
if c1 not in node_id:
node_id[c1] = node_count
node_count += 1
if c2 not in node_id:
node_id[c2] = node_count
node_count += 1
if node_id[c1] not in m:
m[node_id[c1]] = []
if node_id[c2] not in m:
m[node_id[c2]] = []
m[node_id[c1]].append((node_id[c2], weight))
m[node_id[c2]].append((node_id[c1], weight))
print("%d" % node_count, file=f_out)
for i in range(node_count):
index_weight_pairs = [str(x[0]) + " " + str(x[1]) for x in m[i]]
line_str = str(i) + " " + " ".join(index_weight_pairs)
line_str = line_str.strip()
print(line_str, file=f_out)
f.close()
|
Add extract sparse matrix script
|
Add extract sparse matrix script
|
Python
|
apache-2.0
|
agnusmaximus/cyclades,agnusmaximus/cyclades,agnusmaximus/cyclades,agnusmaximus/cyclades
|
Add extract sparse matrix script
|
from __future__ import print_function
import sys
# Given a matrix in Matrix Market format, converts it into the following format:
# Line 1: # of rows
# Line 2..n: Row# col1_# col1_weight col2_# col2_weight ... coln_# coln_weight
input_file = sys.argv[1]
output_file = sys.argv[2]
f = open(input_file)
f_out = open(output_file, "w")
node_id, node_count = {}, 0
m = {}
for line in f:
if line[0] == "%":
continue
c1, c2, weight = [int(x) for x in line.strip().split()]
if c1 not in node_id:
node_id[c1] = node_count
node_count += 1
if c2 not in node_id:
node_id[c2] = node_count
node_count += 1
if node_id[c1] not in m:
m[node_id[c1]] = []
if node_id[c2] not in m:
m[node_id[c2]] = []
m[node_id[c1]].append((node_id[c2], weight))
m[node_id[c2]].append((node_id[c1], weight))
print("%d" % node_count, file=f_out)
for i in range(node_count):
index_weight_pairs = [str(x[0]) + " " + str(x[1]) for x in m[i]]
line_str = str(i) + " " + " ".join(index_weight_pairs)
line_str = line_str.strip()
print(line_str, file=f_out)
f.close()
|
<commit_before><commit_msg>Add extract sparse matrix script<commit_after>
|
from __future__ import print_function
import sys
# Given a matrix in Matrix Market format, converts it into the following format:
# Line 1: # of rows
# Line 2..n: Row# col1_# col1_weight col2_# col2_weight ... coln_# coln_weight
input_file = sys.argv[1]
output_file = sys.argv[2]
f = open(input_file)
f_out = open(output_file, "w")
node_id, node_count = {}, 0
m = {}
for line in f:
if line[0] == "%":
continue
c1, c2, weight = [int(x) for x in line.strip().split()]
if c1 not in node_id:
node_id[c1] = node_count
node_count += 1
if c2 not in node_id:
node_id[c2] = node_count
node_count += 1
if node_id[c1] not in m:
m[node_id[c1]] = []
if node_id[c2] not in m:
m[node_id[c2]] = []
m[node_id[c1]].append((node_id[c2], weight))
m[node_id[c2]].append((node_id[c1], weight))
print("%d" % node_count, file=f_out)
for i in range(node_count):
index_weight_pairs = [str(x[0]) + " " + str(x[1]) for x in m[i]]
line_str = str(i) + " " + " ".join(index_weight_pairs)
line_str = line_str.strip()
print(line_str, file=f_out)
f.close()
|
Add extract sparse matrix script
from __future__ import print_function
import sys
# Given a matrix in Matrix Market format, converts it into the following format:
# Line 1: # of rows
# Line 2..n: Row# col1_# col1_weight col2_# col2_weight ... coln_# coln_weight
input_file = sys.argv[1]
output_file = sys.argv[2]
f = open(input_file)
f_out = open(output_file, "w")
node_id, node_count = {}, 0
m = {}
for line in f:
if line[0] == "%":
continue
c1, c2, weight = [int(x) for x in line.strip().split()]
if c1 not in node_id:
node_id[c1] = node_count
node_count += 1
if c2 not in node_id:
node_id[c2] = node_count
node_count += 1
if node_id[c1] not in m:
m[node_id[c1]] = []
if node_id[c2] not in m:
m[node_id[c2]] = []
m[node_id[c1]].append((node_id[c2], weight))
m[node_id[c2]].append((node_id[c1], weight))
print("%d" % node_count, file=f_out)
for i in range(node_count):
index_weight_pairs = [str(x[0]) + " " + str(x[1]) for x in m[i]]
line_str = str(i) + " " + " ".join(index_weight_pairs)
line_str = line_str.strip()
print(line_str, file=f_out)
f.close()
|
<commit_before><commit_msg>Add extract sparse matrix script<commit_after>from __future__ import print_function
import sys
# Given a matrix in Matrix Market format, converts it into the following format:
# Line 1: # of rows
# Line 2..n: Row# col1_# col1_weight col2_# col2_weight ... coln_# coln_weight
input_file = sys.argv[1]
output_file = sys.argv[2]
f = open(input_file)
f_out = open(output_file, "w")
node_id, node_count = {}, 0
m = {}
for line in f:
if line[0] == "%":
continue
c1, c2, weight = [int(x) for x in line.strip().split()]
if c1 not in node_id:
node_id[c1] = node_count
node_count += 1
if c2 not in node_id:
node_id[c2] = node_count
node_count += 1
if node_id[c1] not in m:
m[node_id[c1]] = []
if node_id[c2] not in m:
m[node_id[c2]] = []
m[node_id[c1]].append((node_id[c2], weight))
m[node_id[c2]].append((node_id[c1], weight))
print("%d" % node_count, file=f_out)
for i in range(node_count):
index_weight_pairs = [str(x[0]) + " " + str(x[1]) for x in m[i]]
line_str = str(i) + " " + " ".join(index_weight_pairs)
line_str = line_str.strip()
print(line_str, file=f_out)
f.close()
|
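A small worked example of the conversion, with hypothetical file names. Node ids are assigned in order of first appearance, and each undirected edge is recorded on both endpoints:

# Given edges.mtx containing:
#   % a comment line
#   1 2 5
#   1 3 7
# running `python extract_sparse_matrix.py edges.mtx out.txt` writes:
#   3
#   0 1 5 2 7
#   1 0 5
#   2 0 7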
|
4755e3d5160589c1e7a6d28f949f6977e06b6e5c
|
djstripe/migrations/0013_remove_card_stripe_id_default.py
|
djstripe/migrations/0013_remove_card_stripe_id_default.py
|
# Generated by Django 2.0 on 2017-12-03 01:21
from django.db import migrations
import djstripe.fields
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0012_card_customer_from_source'),
]
operations = [
migrations.AlterField(
model_name='card',
name='stripe_id',
field=djstripe.fields.StripeIdField(max_length=255, unique=True),
),
]
|
Add missing migrations for removing default from Card.source_id
|
Add missing migrations for removing default from Card.source_id
|
Python
|
mit
|
pydanny/dj-stripe,pydanny/dj-stripe,kavdev/dj-stripe,jleclanche/dj-stripe,dj-stripe/dj-stripe,jleclanche/dj-stripe,dj-stripe/dj-stripe,kavdev/dj-stripe
|
Add missing migrations for removing default from Card.source_id
|
# Generated by Django 2.0 on 2017-12-03 01:21
from django.db import migrations
import djstripe.fields
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0012_card_customer_from_source'),
]
operations = [
migrations.AlterField(
model_name='card',
name='stripe_id',
field=djstripe.fields.StripeIdField(max_length=255, unique=True),
),
]
|
<commit_before><commit_msg>Add missing migrations for removing default from Card.source_id<commit_after>
|
# Generated by Django 2.0 on 2017-12-03 01:21
from django.db import migrations
import djstripe.fields
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0012_card_customer_from_source'),
]
operations = [
migrations.AlterField(
model_name='card',
name='stripe_id',
field=djstripe.fields.StripeIdField(max_length=255, unique=True),
),
]
|
Add missing migrations for removing default from Card.source_id
# Generated by Django 2.0 on 2017-12-03 01:21
from django.db import migrations
import djstripe.fields
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0012_card_customer_from_source'),
]
operations = [
migrations.AlterField(
model_name='card',
name='stripe_id',
field=djstripe.fields.StripeIdField(max_length=255, unique=True),
),
]
|
<commit_before><commit_msg>Add missing migrations for removing default from Card.source_id<commit_after># Generated by Django 2.0 on 2017-12-03 01:21
from django.db import migrations
import djstripe.fields
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0012_card_customer_from_source'),
]
operations = [
migrations.AlterField(
model_name='card',
name='stripe_id',
field=djstripe.fields.StripeIdField(max_length=255, unique=True),
),
]
|
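Migrations like this one are normally generated rather than written by hand; a sketch of producing it programmatically, assuming a configured settings module (name hypothetical):

import os
import django
from django.core.management import call_command

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'myproject.settings')  # hypothetical project
django.setup()
call_command('makemigrations', 'djstripe')  # detects the dropped default on stripe_id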
|
a43aa19c982d46c8290d0501f5616a96ed2b0d05
|
tests/test_package.py
|
tests/test_package.py
|
import yaml
def test_master_shipped_with_sha256():
"""
Test the Master is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/master', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
def test_minion_shipped_with_sha256():
"""
Test the Minion is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/minion', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
|
Test master and minion shipped with sha256
|
Test master and minion shipped with sha256
|
Python
|
mit
|
dincamihai/salt-toaster,dincamihai/salt-toaster
|
Test master and minion shipped with sha256
|
import yaml
def test_master_shipped_with_sha256():
"""
Test the Master is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/master', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
def test_minion_shipped_with_sha256():
"""
Test the Minion is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/minion', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
|
<commit_before><commit_msg>Test master and minion shipped with sha256<commit_after>
|
import yaml
def test_master_shipped_with_sha256():
"""
Test the Master is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/master', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
def test_minion_shipped_with_sha256():
"""
Test the Minion is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/minion', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
|
Test master and minion shipped with sha256
import yaml
def test_master_shipped_with_sha256():
"""
Test the Master is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/master', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
def test_minion_shipped_with_sha256():
"""
Test the Minion is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/minion', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
|
<commit_before><commit_msg>Test master and minion shipped with sha256<commit_after>import yaml
def test_master_shipped_with_sha256():
"""
Test the Master is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/master', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
def test_minion_shipped_with_sha256():
"""
Test the Minion is *shipped* with hash type set to SHA256.
"""
with open('/etc/salt/minion', 'rb') as master_config:
content = yaml.load(master_config)
assert content['hash_type'] == 'sha256'
|
|
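The tests call yaml.load without an explicit Loader, which recent PyYAML releases warn about; a sketch of the safer equivalent check:

import yaml

with open('/etc/salt/master') as master_config:
    content = yaml.safe_load(master_config)  # avoids constructing arbitrary Python objects
assert content['hash_type'] == 'sha256'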
5c11e882496782eea18a1d0e7369eab71408e3c4
|
datatools/jsontr.py
|
datatools/jsontr.py
|
# -*- coding: utf-8 -*-
import re
from collections import OrderedDict
import json
def camelize(data, source, dest=None):
dest = dest or source
for o in data.values():
if source not in o: continue
v = o[source]
o[dest] = v[0].lower() + re.sub(r"\W", "", v.title()[1:])
return data
def sort_keys(o):
out = OrderedDict()
for key in sorted(o.keys()):
out[key] = o[key]
return out
def to_list(o, keykey='key'):
l = []
for k, v in o.items():
if isinstance(v, dict) and keykey not in v:
v2 = OrderedDict()
v2[keykey] = k
v2.update(v)
l.append(v2)
else:
l.append(v)
return l
def to_dict(data, keykey, keep=None):
return OrderedDict(
(v[keykey] if keep == 'keep' else v.pop(keykey), v)
for v in data)
def objectify(data, key):
for k, v in data.items():
data[k] = {key: v}
return data
if __name__ == '__main__':
import sys
tr = vars()[sys.argv[1]]
data = json.load(sys.stdin, object_pairs_hook=OrderedDict)
data = tr(data, *sys.argv[2:])
print json.dumps(data,
indent=2,
ensure_ascii=False,
separators=(',', ': ')
).encode('utf-8')
|
Make a simple json transformer tool
|
Make a simple json transformer tool
|
Python
|
apache-2.0
|
libris/librisxl,libris/librisxl,libris/librisxl
|
Make a simple json transformer tool
|
# -*- coding: utf-8 -*-
import re
from collections import OrderedDict
import json
def camelize(data, source, dest=None):
dest = dest or source
for o in data.values():
if source not in o: continue
v = o[source]
o[dest] = v[0].lower() + re.sub(r"\W", "", v.title()[1:])
return data
def sort_keys(o):
out = OrderedDict()
for key in sorted(o.keys()):
out[key] = o[key]
return out
def to_list(o, keykey='key'):
l = []
for k, v in o.items():
if isinstance(v, dict) and keykey not in v:
v2 = OrderedDict()
v2[keykey] = k
v2.update(v)
l.append(v2)
else:
l.append(v)
return l
def to_dict(data, keykey, keep=None):
return OrderedDict(
(v[keykey] if keep == 'keep' else v.pop(keykey), v)
for v in data)
def objectify(data, key):
for k, v in data.items():
data[k] = {key: v}
return data
if __name__ == '__main__':
import sys
tr = vars()[sys.argv[1]]
data = json.load(sys.stdin, object_pairs_hook=OrderedDict)
data = tr(data, *sys.argv[2:])
print json.dumps(data,
indent=2,
ensure_ascii=False,
separators=(',', ': ')
).encode('utf-8')
|
<commit_before><commit_msg>Make a simple json transformer tool<commit_after>
|
# -*- coding: utf-8 -*-
import re
from collections import OrderedDict
import json
def camelize(data, source, dest=None):
dest = dest or source
for o in data.values():
if source not in o: continue
v = o[source]
o[dest] = v[0].lower() + re.sub(r"\W", "", v.title()[1:])
return data
def sort_keys(o):
out = OrderedDict()
for key in sorted(o.keys()):
out[key] = o[key]
return out
def to_list(o, keykey='key'):
l = []
for k, v in o.items():
if isinstance(v, dict) and keykey not in v:
v2 = OrderedDict()
v2[keykey] = k
v2.update(v)
l.append(v2)
else:
l.append(v)
return l
def to_dict(data, keykey, keep=None):
return OrderedDict(
(v[keykey] if keep == 'keep' else v.pop(keykey), v)
for v in data)
def objectify(data, key):
for k, v in data.items():
data[k] = {key: v}
return data
if __name__ == '__main__':
import sys
tr = vars()[sys.argv[1]]
data = json.load(sys.stdin, object_pairs_hook=OrderedDict)
data = tr(data, *sys.argv[2:])
print json.dumps(data,
indent=2,
ensure_ascii=False,
separators=(',', ': ')
).encode('utf-8')
|
Make a simple json transformer tool
# -*- coding: utf-8 -*-
import re
from collections import OrderedDict
import json
def camelize(data, source, dest=None):
dest = dest or source
for o in data.values():
if source not in o: continue
v = o[source]
o[dest] = v[0].lower() + re.sub(r"\W", "", v.title()[1:])
return data
def sort_keys(o):
out = OrderedDict()
for key in sorted(o.keys()):
out[key] = o[key]
return out
def to_list(o, keykey='key'):
l = []
for k, v in o.items():
if isinstance(v, dict) and keykey not in v:
v2 = OrderedDict()
v2[keykey] = k
v2.update(v)
l.append(v2)
else:
l.append(v)
return l
def to_dict(data, keykey, keep=None):
return OrderedDict(
(v[keykey] if keep == 'keep' else v.pop(keykey), v)
for v in data)
def objectify(data, key):
for k, v in data.items():
data[k] = {key: v}
return data
if __name__ == '__main__':
import sys
tr = vars()[sys.argv[1]]
data = json.load(sys.stdin, object_pairs_hook=OrderedDict)
data = tr(data, *sys.argv[2:])
print json.dumps(data,
indent=2,
ensure_ascii=False,
separators=(',', ': ')
).encode('utf-8')
|
<commit_before><commit_msg>Make a simple json transformer tool<commit_after># -*- coding: utf-8 -*-
import re
from collections import OrderedDict
import json
def camelize(data, source, dest=None):
dest = dest or source
for o in data.values():
if source not in o: continue
v = o[source]
o[dest] = v[0].lower() + re.sub(r"\W", "", v.title()[1:])
return data
def sort_keys(o):
out = OrderedDict()
for key in sorted(o.keys()):
out[key] = o[key]
return out
def to_list(o, keykey='key'):
l = []
for k, v in o.items():
if isinstance(v, dict) and keykey not in v:
v2 = OrderedDict()
v2[keykey] = k
v2.update(v)
l.append(v2)
else:
l.append(v)
return l
def to_dict(data, keykey, keep=None):
return OrderedDict(
(v[keykey] if keep == 'keep' else v.pop(keykey), v)
for v in data)
def objectify(data, key):
for k, v in data.items():
data[k] = {key: v}
return data
if __name__ == '__main__':
import sys
tr = vars()[sys.argv[1]]
data = json.load(sys.stdin, object_pairs_hook=OrderedDict)
data = tr(data, *sys.argv[2:])
print json.dumps(data,
indent=2,
ensure_ascii=False,
separators=(',', ': ')
).encode('utf-8')
|
|
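A usage sketch for camelize as defined above (run under Python 2, matching the module's print statement; the import path follows the file header):

from collections import OrderedDict
from datatools.jsontr import camelize

data = OrderedDict([('a', {'label': 'foo bar baz'})])
result = camelize(data, 'label', 'key')
# First letter lowered, the rest title-cased, non-word characters stripped.
print(result['a']['key'])  # -> fooBarBaz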
b05529297d388e175ff83c1080e6127f3141dc55
|
billjobs/tests/tests_user_admin_api.py
|
billjobs/tests/tests_user_admin_api.py
|
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
Add test for user admin api
|
Add test for user admin api
|
Python
|
mit
|
ioO/billjobs
|
Add test for user admin api
|
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
<commit_before><commit_msg>Add test for user admin api<commit_after>
|
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
Add test for user admin api
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
<commit_before><commit_msg>Add test for user admin api<commit_after>from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework import status
from rest_framework.test import APIClient, APIRequestFactory, \
force_authenticate
from billjobs.views import UserAdmin
class UserAdminAPI(TestCase):
""" Test User Admin API REST endpoint """
fixtures=['account_test.yaml']
def setUp(self):
self.client = APIClient()
self.factory = APIRequestFactory()
self.admin = User.objects.get(pk=1)
def test_admin_list_user(self):
request = self.factory.get('/billjobs/users/')
force_authenticate(request, user=self.admin)
view = UserAdmin.as_view()
response = view(request)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
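The same assertion could be driven through the APIClient built in setUp; a sketch of an equivalent test body, assuming the URL is routed as in the factory request:

    def test_admin_list_user_via_client(self):
        self.client.force_authenticate(user=self.admin)
        response = self.client.get('/billjobs/users/')
        self.assertEqual(response.status_code, status.HTTP_200_OK)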
|
161a02ae054c9783eeeed97a680c28d184d63cd5
|
scripts/queue_speed.py
|
scripts/queue_speed.py
|
# -*- coding: utf-8 -*-
from __future__ import print_function
try:
from insight.insight_settings import DEFAULT_REDIS_QUEUE_KEY
except ImportError:
DEFAULT_REDIS_QUEUE_KEY = 'insight-reloaded'
import requests
import time
import sys
API_URL = 'http://localhost:8888/'
QUEUE = 'insight-reloaded'
def main():
# Init the system
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()
STARTING = {'number_in_queue': stats["number_in_queue"],
'time': time.time()}
while True:
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()["number_in_queue"]
deltaQ = stats - STARTING['number_in_queue']
deltaT = time.time() - STARTING['time']
avg = deltaQ * -3600 / deltaT
print(
'\r%s — current: %d — avg: %.2f per hour' % (QUEUE, stats, avg),
end=''
)
sys.stdout.flush()
time.sleep(2)
if __name__ == '__main__':
main()
|
Add a script to watch the queue speed
|
Add a script to watch the queue speed
|
Python
|
bsd-3-clause
|
novapost/insight-reloaded
|
Add a script to watch the queue speed
|
# -*- coding: utf-8 -*-
from __future__ import print_function
try:
from insight.insight_settings import DEFAULT_REDIS_QUEUE_KEY
except ImportError:
DEFAULT_REDIS_QUEUE_KEY = 'insight-reloaded'
import requests
import time
import sys
API_URL = 'http://localhost:8888/'
QUEUE = 'insight-reloaded'
def main():
# Init the system
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()
STARTING = {'number_in_queue': stats["number_in_queue"],
'time': time.time()}
while True:
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()["number_in_queue"]
deltaQ = stats - STARTING['number_in_queue']
deltaT = time.time() - STARTING['time']
avg = deltaQ * -3600 / deltaT
print(
'\r%s — current: %d — avg: %.2f per hour' % (QUEUE, stats, avg),
end=''
)
sys.stdout.flush()
time.sleep(2)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to watch the queue speed<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import print_function
try:
from insight.insight_settings import DEFAULT_REDIS_QUEUE_KEY
except ImportError:
DEFAULT_REDIS_QUEUE_KEY = 'insight-reloaded'
import requests
import time
import sys
API_URL = 'http://localhost:8888/'
QUEUE = 'insight-reloaded'
def main():
# Init the system
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()
STARTING = {'number_in_queue': stats["number_in_queue"],
'time': time.time()}
while True:
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()["number_in_queue"]
deltaQ = stats - STARTING['number_in_queue']
deltaT = time.time() - STARTING['time']
avg = deltaQ * -3600 / deltaT
print(
'\r%s — current: %d — avg: %.2f per hour' % (QUEUE, stats, avg),
end=''
)
sys.stdout.flush()
time.sleep(2)
if __name__ == '__main__':
main()
|
Add a script to watch the queue speed
# -*- coding: utf-8 -*-
from __future__ import print_function
try:
from insight.insight_settings import DEFAULT_REDIS_QUEUE_KEY
except ImportError:
DEFAULT_REDIS_QUEUE_KEY = 'insight-reloaded'
import requests
import time
import sys
API_URL = 'http://localhost:8888/'
QUEUE = 'insight-reloaded'
def main():
# Init the system
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()
STARTING = {'number_in_queue': stats["number_in_queue"],
'time': time.time()}
while True:
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()["number_in_queue"]
deltaQ = stats - STARTING['number_in_queue']
deltaT = time.time() - STARTING['time']
avg = deltaQ * -3600 / deltaT
print(
'\r%s — current: %d — avg: %.2f per hour' % (QUEUE, stats, avg),
end=''
)
sys.stdout.flush()
time.sleep(2)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add a script to watch the queue speed<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function
try:
from insight.insight_settings import DEFAULT_REDIS_QUEUE_KEY
except ImportError:
DEFAULT_REDIS_QUEUE_KEY = 'insight-reloaded'
import requests
import time
import sys
API_URL = 'http://localhost:8888/'
QUEUE = 'insight-reloaded'
def main():
# Init the system
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()
STARTING = {'number_in_queue': stats["number_in_queue"],
'time': time.time()}
while True:
resp = requests.get('%s%s/status' % (API_URL, QUEUE))
stats = resp.json()["number_in_queue"]
deltaQ = stats - STARTING['number_in_queue']
deltaT = time.time() - STARTING['time']
avg = deltaQ * -3600 / deltaT
print(
'\r%s — current: %d — avg: %.2f per hour' % (QUEUE, stats, avg),
end=''
)
sys.stdout.flush()
time.sleep(2)
if __name__ == '__main__':
main()
|
|
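A worked check of the rate formula above: deltaQ is negative while the queue drains, so the -3600 factor both flips the sign and converts per-second to per-hour.

deltaQ = -50      # queue shrank by 50 items...
deltaT = 120.0    # ...over 120 seconds
avg = deltaQ * -3600 / deltaT
print(avg)        # 1500.0 items per hour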
b72fa34e703f8083e9df8811151bd6d0b1a8343b
|
utilities/__init__.py
|
utilities/__init__.py
|
#! /usr/bin/env python
from subprocess import Popen, PIPE
def _popen(cmd):
"""
Fork the specified command, returning a tuple of (stdout, stderr)
"""
return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
|
Add a commonly used subprocess command of mine
|
Add a commonly used subprocess command of mine
|
Python
|
mit
|
IanLee1521/utilities
|
Add a commonly used subprocess command of mine
|
#! /usr/bin/env python
from subprocess import Popen, PIPE
def _popen(cmd):
"""
Fork the specified command, returning a tuple of (stdout, stderr)
"""
return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
|
<commit_before><commit_msg>Add a commonly used subprocess command of mine<commit_after>
|
#! /usr/bin/env python
from subprocess import Popen, PIPE
def _popen(cmd):
"""
Fork the specified command, returning a tuple of (stdout, stderr)
"""
return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
|
Add a commonly used subprocess command of mine
#! /usr/bin/env python
from subprocess import Popen, PIPE
def _popen(cmd):
"""
Fork the specified command, returning a tuple of (stdout, stderr)
"""
return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
|
<commit_before><commit_msg>Add a commonly used subprocess command of mine<commit_after>#! /usr/bin/env python
from subprocess import Popen, PIPE
def _popen(cmd):
"""
Fork the specified command, returning a tuple of (stdout, stderr)
"""
return Popen(cmd, shell=True, stdout=PIPE, stderr=PIPE).communicate()
|
|
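A quick usage sketch (a POSIX shell is assumed, since the command string goes through shell=True):

from utilities import _popen

out, err = _popen('echo hello')
print(out)  # b'hello\n' under Python 3; err is b'' when nothing is written to stderr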
ab9184243df13fb1ab130a334a3c66283cc79fda
|
src/twitter_bot/service/url_shortener.py
|
src/twitter_bot/service/url_shortener.py
|
import json
import requests
from twitter_bot.config.api_keys import GOOGLE_API_KEYS
def get_short_url(long_url):
"""Call Google URL Shortener API to return shortened URL"""
api_key = GOOGLE_API_KEYS['URL_SHORTENER_API_KEY']
base_url = 'https://www.googleapis.com/urlshortener/v1/url'
headers = {'Content-Type':'application/json'}
post_data = json.dumps({"longUrl": long_url})
response = requests.post(base_url, params={'key': api_key}, data=post_data, headers=headers)
return response.json()['id']
|
Add support for Google URL Shortener API
|
Add support for Google URL Shortener API
|
Python
|
mit
|
econne01/twitter-news-bot
|
Add support for Google URL Shortener API
|
import json
import requests
from twitter_bot.config.api_keys import GOOGLE_API_KEYS
def get_short_url(long_url):
"""Call Google URL Shortener API to return shortened URL"""
api_key = GOOGLE_API_KEYS['URL_SHORTENER_API_KEY']
base_url = 'https://www.googleapis.com/urlshortener/v1/url'
headers = {'Content-Type':'application/json'}
post_data = json.dumps({"longUrl": long_url})
response = requests.post(base_url, params={'key': api_key}, data=post_data, headers=headers)
return response.json()['id']
|
<commit_before><commit_msg>Add support for Google URL Shortener API<commit_after>
|
import json
import requests
from twitter_bot.config.api_keys import GOOGLE_API_KEYS
def get_short_url(long_url):
"""Call Google URL Shortener API to return shortened URL"""
api_key = GOOGLE_API_KEYS['URL_SHORTENER_API_KEY']
base_url = 'https://www.googleapis.com/urlshortener/v1/url'
headers = {'Content-Type':'application/json'}
post_data = json.dumps({"longUrl": long_url})
response = requests.post(base_url, params={'key': api_key}, data=post_data, headers=headers)
return response.json()['id']
|
Add support for Google URL Shortener API
import json
import requests
from twitter_bot.config.api_keys import GOOGLE_API_KEYS
def get_short_url(long_url):
"""Call Google URL Shortener API to return shortened URL"""
api_key = GOOGLE_API_KEYS['URL_SHORTENER_API_KEY']
base_url = 'https://www.googleapis.com/urlshortener/v1/url'
headers = {'Content-Type':'application/json'}
post_data = json.dumps({"longUrl": long_url})
response = requests.post(base_url, params={'key': api_key}, data=post_data, headers=headers)
return response.json()['id']
|
<commit_before><commit_msg>Add support for Google URL Shortener API<commit_after>import json
import requests
from twitter_bot.config.api_keys import GOOGLE_API_KEYS
def get_short_url(long_url):
"""Call Google URL Shortener API to return shortened URL"""
api_key = GOOGLE_API_KEYS['URL_SHORTENER_API_KEY']
base_url = 'https://www.googleapis.com/urlshortener/v1/url'
headers = {'Content-Type':'application/json'}
post_data = json.dumps({"longUrl": long_url})
response = requests.post(base_url, params={'key': api_key}, data=post_data, headers=headers)
return response.json()['id']
|
|
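A hedged usage sketch; the returned goo.gl form is illustrative and a valid API key is required (Google has since retired this shortener service):

from twitter_bot.service.url_shortener import get_short_url

short = get_short_url('https://example.com/some/very/long/path')
print(short)  # e.g. 'https://goo.gl/abc123' while the API was live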
74f4b46ab44016f9e7ad56cde5916ffbc45723d3
|
ppapi/generators/idl_visitor.py
|
ppapi/generators/idl_visitor.py
|
#!/usr/bin/python
#
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Visitor Object for traversing AST """
#
# IDLVisitor
#
# The IDLVisitor class will traverse an AST truncating portions of the tree
# that fail due to class or version filters. For each node, after the filter
# passes, the visitor will call the 'Arrive' member, passing in the node and
# the data passed in from the parent call. It will then Visit the children.
# When done processing children, the visitor will call the 'Depart' member
# before returning
#
class IDLVisitor(object):
def __init__(self):
self.depth = 0
# Return TRUE if the node should be visited
def VisitFilter(self, node, data):
return True
# Return TRUE if data should be added to the childdata list
def AgrigateFilter(self, data):
return data is not None
def Visit(self, node, data):
self.depth += 1
if not self.VisitFilter(node, data): return None
childdata = []
newdata = self.Arrive(node, data)
for child in node.GetChildren():
ret = self.Visit(child, newdata)
if self.AgrigateFilter(ret):
childdata.append(ret)
out = self.Depart(node, newdata, childdata)
self.depth -= 1
return out
def Arrive(self, node, data):
return data
def Depart(self, node, data, childdata):
return data
#
# IDLVersionVisitor
#
# The IDLVersionVisitor will only visit nodes with intervals that include the
# version. It will also optionally filter based on a class list
#
class IDLVersionVisitor(object):
def __init__(self, version, classList):
self.version = version
    self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
class IDLRangeVisitor(object):
def __init__(self, vmin, vmax, classList):
self.vmin = vmin
self.vmax = vmax
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
|
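A minimal subclass sketch showing the Arrive hook in use; node objects are assumed to expose GetChildren() as the base class requires:

class CountingVisitor(IDLVisitor):
    def __init__(self):
        IDLVisitor.__init__(self)
        self.count = 0

    def Arrive(self, node, data):
        self.count += 1  # count every node that passes VisitFilter
        return data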
Add missing IDL Visitor class
|
Add missing IDL Visitor class
This class provides a simple mechanism for recursively traversing the AST
for both simple and version aware traversal.
TBR= sehr@google.com
BUG= http://code.google.com/p/chromium/issues/detail?id=87684
TEST= python idl_c_header.py
Review URL: http://codereview.chromium.org/7448001
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@93036 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
nacl-webkit/chrome_deps,mogoweb/chromium-crosswalk,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,patrickm/chromium.src,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ltilve/chromium,anirudhSK/chromium,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,keishi/chromium,krieger-od/nwjs_chromium.src,Chilledheart/chromium,dednal/chromium.src,dushu1203/chromium.src,timopulkkinen/BubbleFish,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,keishi/chromium,chuan9/chromium-crosswalk,zcbenz/cefode-chromium,timopulkkinen/BubbleFish,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,markYoungH/chromium.src,jaruba/chromium.src,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,keishi/chromium,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,mogoweb/chromium-crosswalk,markYoungH/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,robclark/chromium,hgl888/chromium-crosswalk,Jonekee/chromium.src,patrickm/chromium.src,Fireblend/chromium-crosswalk,junmin-zhu/chromium-rivertrail,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,bright-sparks/chromium-spacewalk,jaruba/chromium.src,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,robclark/chromium,zcbenz/cefode-chromium,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,keishi/chromium,Jonekee/chromium.src,nacl-webkit/chrome_deps,keishi/chromium,bright-sparks/chromium-spacewalk,anirudhSK/chromium,rogerwang/chromium,ondra-novak/chromium.src,hujiajie/pa-chromium,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,junmin-zhu/chromium-rivertrail,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,anirudhSK/chromium,dushu1203/chromium.src,keishi/chromium,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,keishi/chromium,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,anirudhSK/chromium,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,ondra-novak/chromium.src,hujiajie/pa-chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,TheTypoMaster/chromium-crosswalk,junmin-zhu/chromium-rivertrail,robclark/chromium,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,PeterW
angIntel/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,Chilledheart/chromium,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,littlstar/chromium.src,keishi/chromium,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,jaruba/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,M4sse/chromium.src,M4sse/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,Jonekee/chromium.src,ltilve/chromium,jaruba/chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,rogerwang/chromium,axinging/chromium-crosswalk,bright-sparks/chromium-spacewalk,robclark/chromium,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,robclark/chromium,keishi/chromium,Just-D/chromium-1,ondra-novak/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,timopulkkinen/BubbleFish,dushu1203/chromium.src,robclark/chromium,littlstar/chromium.src,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,M4sse/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,markYoungH/chromium.src,Chilledheart/chromium,keishi/chromium,M4sse/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,timopulkkinen/BubbleFish,jaruba/chromium.src,mogoweb/chromium-crosswalk,robclark/chromium,dednal/chromium.src,patrickm/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,zcbenz/cefode-chromium,crosswalk-project/chromium-crosswalk-efl,patrickm/chromium.src,patrickm/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,robclark/chromium,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,markYoungH/chromium.src,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,ltilve/chromium,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,zcbenz/cefode-chromium,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,Just-D/chromium-1,rogerwang/chromium,Just-D/chromium-1,timopulkkinen/BubbleFish,Chilledheart/chromium,patrickm/chromium.src,dushu1203/chromium.src,anirudhSK/chromium,ChromiumWebApps/chromium,rogerwang/chromium,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,dushu1203/chromium.src,nacl-webkit/chrome_deps,Chilledheart/chromium,zcbenz/cefode-chromium,ondra-novak/chromium.src,Chilledheart/chromium,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,robclark/chromium,rogerwang/chromium,chuan9/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,rogerwang/chromium,Just-D/chromium-1,hgl888/chrom
ium-crosswalk-efl,anirudhSK/chromium,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,hujiajie/pa-chromium,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,robclark/chromium,nacl-webkit/chrome_deps,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,anirudhSK/chromium,patrickm/chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,littlstar/chromium.src,hujiajie/pa-chromium,ondra-novak/chromium.src,anirudhSK/chromium,Fireblend/chromium-crosswalk,rogerwang/chromium,ChromiumWebApps/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,markYoungH/chromium.src,timopulkkinen/BubbleFish,keishi/chromium,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,dednal/chromium.src,ltilve/chromium,jaruba/chromium.src,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,rogerwang/chromium,hgl888/chromium-crosswalk,nacl-webkit/chrome_deps,ltilve/chromium,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,zcbenz/cefode-chromium,mogoweb/chromium-crosswalk,rogerwang/chromium,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,littlstar/chromium.src,rogerwang/chromium,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,axinging/chromium-crosswalk,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,mogoweb/chromium-crosswalk
|
Add missing IDL Visitor class
This class provides a simple mechanism for recursively traversing the AST
for both simple and version aware traversal.
TBR= sehr@google.com
BUG= http://code.google.com/p/chromium/issues/detail?id=87684
TEST= python idl_c_header.py
Review URL: http://codereview.chromium.org/7448001
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@93036 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/python
#
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Visitor Object for traversing AST """
#
# IDLVisitor
#
# The IDLVisitor class will traverse an AST truncating portions of the tree
# that fail due to class or version filters. For each node, after the filter
# passes, the visitor will call the 'Arrive' member, passing in the node and
# the data passed in from the parent call. It will then Visit the children.
# When done processing children, the visitor will call the 'Depart' member
# before returning
#
class IDLVisitor(object):
def __init__(self):
self.depth = 0
# Return TRUE if the node should be visited
def VisitFilter(self, node, data):
return True
# Return TRUE if data should be added to the childdata list
def AgrigateFilter(self, data):
return data is not None
def Visit(self, node, data):
self.depth += 1
if not self.VisitFilter(node, data): return None
childdata = []
newdata = self.Arrive(node, data)
for child in node.GetChildren():
ret = self.Visit(child, newdata)
if self.AgrigateFilter(ret):
childdata.append(ret)
out = self.Depart(node, newdata, childdata)
self.depth -= 1
return out
def Arrive(self, node, data):
return data
def Depart(self, node, data, childdata):
return data
#
# IDLVersionVisitor
#
# The IDLVersionVisitor will only visit nodes with intervals that include the
# version. It will also optionally filter based on a class list
#
class IDLVersionVisitor(object):
def __init__(self, version, classList):
self.version = version
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
class IDLRangeVisitor(object):
def __init__(self, vmin, vmax, classList):
self.vmin = vmin
self.vmax = vmax
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
|
<commit_before><commit_msg>Add missing IDL Visitor class
This class provides a simple mechanism for recursively traversing the AST
for both simple and version aware traversal.
TBR= sehr@google.com
BUG= http://code.google.com/p/chromium/issues/detail?id=87684
TEST= python idl_c_header.py
Review URL: http://codereview.chromium.org/7448001
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@93036 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/python
#
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Visitor Object for traversing AST """
#
# IDLVisitor
#
# The IDLVisitor class will traverse an AST truncating portions of the tree
# that fail due to class or version filters. For each node, after the filter
# passes, the visitor will call the 'Arrive' member, passing in the node and
# the data passed in from the parent call. It will then Visit the children.
# When done processing children, the visitor will call the 'Depart' member
# before returning
#
class IDLVisitor(object):
def __init__(self):
self.depth = 0
# Return TRUE if the node should be visited
def VisitFilter(self, node, data):
return True
# Return TRUE if data should be added to the childdata list
def AgrigateFilter(self, data):
return data is not None
def Visit(self, node, data):
self.depth += 1
if not self.VisitFilter(node, data): return None
childdata = []
newdata = self.Arrive(node, data)
for child in node.GetChildren():
ret = self.Visit(child, newdata)
if self.AgrigateFilter(ret):
childdata.append(ret)
out = self.Depart(node, newdata, childdata)
self.depth -= 1
return out
def Arrive(self, node, data):
return data
def Depart(self, node, data, childdata):
return data
#
# IDLVersionVisitor
#
# The IDLVersionVisitor will only visit nodes with intervals that include the
# version. It will also optionally filter based on a class list
#
class IDLVersionVisitor(object):
def __init__(self, version, classList):
self.version = version
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
class IDLRangeVisitor(object):
def __init__(self, vmin, vmax, classList):
self.vmin = vmin
self.vmax = vmax
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
|
Add missing IDL Visitor class
This class provides a simple mechanism for recursively traversing the AST
for both simple and version aware traversal.
TBR= sehr@google.com
BUG= http://code.google.com/p/chromium/issues/detail?id=87684
TEST= python idl_c_header.py
Review URL: http://codereview.chromium.org/7448001
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@93036 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/python
#
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Visitor Object for traversing AST """
#
# IDLVisitor
#
# The IDLVisitor class will traverse an AST truncating portions of the tree
# that fail due to class or version filters. For each node, after the filter
# passes, the visitor will call the 'Arrive' member, passing in the node and
# the data passed in from the parent call. It will then Visit the children.
# When done processing children, the visitor will call the 'Depart' member
# before returning
#
class IDLVisitor(object):
def __init__(self):
self.depth = 0
# Return TRUE if the node should be visited
def VisitFilter(self, node, data):
return True
# Return TRUE if data should be added to the childdata list
def AgrigateFilter(self, data):
return data is not None
def Visit(self, node, data):
self.depth += 1
if not self.VisitFilter(node, data): return None
childdata = []
newdata = self.Arrive(node, data)
for child in node.GetChildren():
ret = self.Visit(child, newdata)
if self.AgrigateFilter(ret):
childdata.append(ret)
out = self.Depart(node, newdata, childdata)
self.depth -= 1
return out
def Arrive(self, node, data):
return data
def Depart(self, node, data, childdata):
return data
#
# IDLVersionVisitor
#
# The IDLVersionVisitor will only visit nodes with intervals that include the
# version. It will also optionally filter based on a class list
#
class IDLVersionVisitor(object):
def __init__(self, version, classList):
self.version = version
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
class IDLRangeVisitor(object):
def __init__(self, vmin, vmax, classList):
self.vmin = vmin
self.vmax = vmax
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
|
<commit_before><commit_msg>Add missing IDL Visitor class
This class provides a simple mechanism for recursively traversing the AST
for both simple and version aware traversal.
TBR= sehr@google.com
BUG= http://code.google.com/p/chromium/issues/detail?id=87684
TEST= python idl_c_header.py
Review URL: http://codereview.chromium.org/7448001
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@93036 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/python
#
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Visitor Object for traversing AST """
#
# IDLVisitor
#
# The IDLVisitor class will traverse an AST truncating portions of the tree
# that fail due to class or version filters. For each node, after the filter
# passes, the visitor will call the 'Arrive' member, passing in the node and
# the data passed in from the parent call. It will then Visit the children.
# When done processing children, the visitor will call the 'Depart' member
# before returning
#
class IDLVisitor(object):
def __init__(self):
self.depth = 0
# Return TRUE if the node should be visited
def VisitFilter(self, node, data):
return True
# Return TRUE if data should be added to the childdata list
def AgrigateFilter(self, data):
return data is not None
def Visit(self, node, data):
self.depth += 1
if not self.VisitFilter(node, data): return None
childdata = []
newdata = self.Arrive(node, data)
for child in node.GetChildren():
ret = self.Visit(child, newdata)
if self.AgrigateFilter(ret):
childdata.append(ret)
out = self.Depart(node, newdata, childdata)
self.depth -= 1
return out
def Arrive(self, node, data):
return data
def Depart(self, node, data, childdata):
return data
#
# IDLVersionVisitor
#
# The IDLVersionVisitor will only visit nodes with intervals that include the
# version. It will also optionally filter based on a class list
#
class IDLVersionVisitor(object):
def __init__(self, version, classList):
self.version = version
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
class IDLRangeVisitor(object):
def __init__(self, vmin, vmax, classList):
self.vmin = vmin
self.vmax = vmax
self.classList = classList
def Filter(self, node, data):
if self.classList and node.cls not in self.classList: return False
if not node.IsVersion(self.version): return False
return True
|
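A minimal usage sketch for the visitor protocol above (an illustration, not part of the commit): the subclass name is hypothetical, but it relies only on names the snippet itself defines (Visit, Depart, node.GetChildren, node.cls).

# Hypothetical subclass of the IDLVisitor defined in this commit.
class ClassNameCollector(IDLVisitor):
  def Depart(self, node, data, childdata):
    # childdata holds the non-None results of the children's Visit
    # calls; flatten them and append this node's class name.
    names = [n for child_names in childdata for n in child_names]
    return names + [node.cls]

# Assuming `root` is the top of a parsed IDL AST:
# all_names = ClassNameCollector().Visit(root, None)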
|
baf32512ad87cf9cf51877aaaa44396aca777cce
|
alembic/versions/4c4b79f8c4a_adding_geom_gix_to_markers.py
|
alembic/versions/4c4b79f8c4a_adding_geom_gix_to_markers.py
|
"""Adding geom geographical index to markers and discussions
Revision ID: 4c4b79f8c4a
Revises: 10ffa15c5d24
Create Date: 2018-03-07 13:49:06.780319
"""
# revision identifiers, used by Alembic.
revision = '4c4b79f8c4a'
down_revision = '10ffa15c5d24'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
import geoalchemy2 as ga
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('CREATE INDEX geom_gix ON markers USING GIST (geography(geom));')
conn.execute('CREATE INDEX discussions_gix ON discussions USING GIST (geography(geom));')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('DROP INDEX geom_gix;')
conn.execute('DROP INDEX discussions_gix;')
### end Alembic commands ###
|
Add geom gix idx to markers and discussions
|
Add geom gix idx to markers and discussions
|
Python
|
mit
|
hasadna/anyway,hasadna/anyway,hasadna/anyway,hasadna/anyway
|
Add geom gix idx to markers and discussions
|
"""Adding geom geographical index to markers and discussions
Revision ID: 4c4b79f8c4a
Revises: 10ffa15c5d24
Create Date: 2018-03-07 13:49:06.780319
"""
# revision identifiers, used by Alembic.
revision = '4c4b79f8c4a'
down_revision = '10ffa15c5d24'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
import geoalchemy2 as ga
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('CREATE INDEX geom_gix ON markers USING GIST (geography(geom));')
conn.execute('CREATE INDEX discussions_gix ON discussions USING GIST (geography(geom));')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('DROP INDEX geom_gix;')
conn.execute('DROP INDEX discussions_gix;')
### end Alembic commands ###
|
<commit_before><commit_msg>Add geom gix idx to markers and discussions<commit_after>
|
"""Adding geom geographical index to markers and discussions
Revision ID: 4c4b79f8c4a
Revises: 10ffa15c5d24
Create Date: 2018-03-07 13:49:06.780319
"""
# revision identifiers, used by Alembic.
revision = '4c4b79f8c4a'
down_revision = '10ffa15c5d24'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
import geoalchemy2 as ga
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('CREATE INDEX geom_gix ON markers USING GIST (geography(geom));')
conn.execute('CREATE INDEX discussions_gix ON discussions USING GIST (geography(geom));')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('DROP INDEX geom_gix;')
conn.execute('DROP INDEX discussions_gix;')
### end Alembic commands ###
|
Add geom gix idx to markers and discussions"""Adding geom geographical index to markers and discussions
Revision ID: 4c4b79f8c4a
Revises: 10ffa15c5d24
Create Date: 2018-03-07 13:49:06.780319
"""
# revision identifiers, used by Alembic.
revision = '4c4b79f8c4a'
down_revision = '10ffa15c5d24'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
import geoalchemy2 as ga
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('CREATE INDEX geom_gix ON markers USING GIST (geography(geom));')
conn.execute('CREATE INDEX discussions_gix ON discussions USING GIST (geography(geom));')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('DROP INDEX geom_gix;')
conn.execute('DROP INDEX discussions_gix;')
### end Alembic commands ###
|
<commit_before><commit_msg>Add geom gix idx to markers and discussions<commit_after>"""Adding geom geographical index to markers and discussions
Revision ID: 4c4b79f8c4a
Revises: 10ffa15c5d24
Create Date: 2018-03-07 13:49:06.780319
"""
# revision identifiers, used by Alembic.
revision = '4c4b79f8c4a'
down_revision = '10ffa15c5d24'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
import geoalchemy2 as ga
def upgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('CREATE INDEX geom_gix ON markers USING GIST (geography(geom));')
conn.execute('CREATE INDEX discussions_gix ON discussions USING GIST (geography(geom));')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
conn = op.get_bind()
conn.execute('DROP INDEX geom_gix;')
conn.execute('DROP INDEX discussions_gix;')
### end Alembic commands ###
|
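For context, a hedged sketch of the kind of lookup these functional GiST indexes are built for; it is not part of the migration, and the column name, coordinates, and 500 m radius are illustrative assumptions:

# Hypothetical proximity query; the geography(geom) expression matches
# the indexed expression above, and casting the probe point to
# ::geography makes ST_DWithin measure distance in meters.
conn.execute(
    "SELECT id FROM markers "
    "WHERE ST_DWithin(geography(geom), "
    "ST_MakePoint(34.78, 32.08)::geography, 500);"
)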
|
4b9d0550702093b3dcd49be257de8874c2464bb5
|
mysite/settings/tests.py
|
mysite/settings/tests.py
|
from .base import * # noqa
DATABASES['default']['CONN_MAX_AGE'] = 0
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
RUNNING_TESTS = True
SECRET_KEY = "just here for testing"
|
Refactor settings to be pure python
|
Refactor settings to be pure python
This starts to bring the settings into line with a) all the other DC
projects and b) a pure python way of setting everything up
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
Refactor settings to be pure python
This starts to bring the settings into line with a) all the other DC
projects and b) a pure python way of setting everything up
|
from .base import * # noqa
DATABASES['default']['CONN_MAX_AGE'] = 0
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
RUNNING_TESTS = True
SECRET_KEY = "just here for testing"
|
<commit_before><commit_msg>Refactor settings to be pure python
This starts to bring the settings into line with a) all the other DC
projects and b) a pure python way of setting everything up<commit_after>
|
from .base import * # noqa
DATABASES['default']['CONN_MAX_AGE'] = 0
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
RUNNING_TESTS = True
SECRET_KEY = "just here for testing"
|
Refactor settings to be pure python
This starts to bring the settings into line with a) all the other DC
projects and b) a pure python way of setting everything upfrom .base import * # noqa
DATABASES['default']['CONN_MAX_AGE'] = 0
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
RUNNING_TESTS = True
SECRET_KEY = "just here for testing"
|
<commit_before><commit_msg>Refactor settings to be pure python
This starts to bring the settings into line with a) all the other DC
projects and b) a pure python way of setting everything up<commit_after>from .base import * # noqa
DATABASES['default']['CONN_MAX_AGE'] = 0
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.MD5PasswordHasher',
]
RUNNING_TESTS = True
SECRET_KEY = "just here for testing"
|
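A brief, hedged note on wiring a module like this in (the exact entry point is an assumption about the project layout, not something the commit states): Django selects a settings module through DJANGO_SETTINGS_MODULE.

# Hypothetical bootstrap, mirroring what a stock manage.py does.
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mysite.settings.tests')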
|
d8390d1111dcd4fd4af0b195dc0f36d352d25f4e
|
astropy/coordinates/tests/test_utils.py
|
astropy/coordinates/tests/test_utils.py
|
from astropy.time import Time
from astropy.coordinates.builtin_frames.utils import get_polar_motion
from astropy.utils.exceptions import AstropyWarning
import pytest
def test_polar_motion_unsupported_dates():
msg = r'Tried to get polar motions for times {} IERS.*'
with pytest.warns(AstropyWarning, match=msg.format('before')):
get_polar_motion(Time('1900-01-01'))
with pytest.warns(AstropyWarning, match=msg.format('after')):
get_polar_motion(Time('2100-01-01'))
|
Add test for polar motion warnings
|
Add test for polar motion warnings
|
Python
|
bsd-3-clause
|
saimn/astropy,larrybradley/astropy,saimn/astropy,pllim/astropy,dhomeier/astropy,lpsinger/astropy,astropy/astropy,lpsinger/astropy,saimn/astropy,mhvk/astropy,larrybradley/astropy,astropy/astropy,astropy/astropy,StuartLittlefair/astropy,mhvk/astropy,larrybradley/astropy,dhomeier/astropy,dhomeier/astropy,pllim/astropy,saimn/astropy,lpsinger/astropy,mhvk/astropy,mhvk/astropy,aleksandr-bakanov/astropy,pllim/astropy,dhomeier/astropy,StuartLittlefair/astropy,pllim/astropy,aleksandr-bakanov/astropy,astropy/astropy,larrybradley/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,dhomeier/astropy,lpsinger/astropy,StuartLittlefair/astropy,lpsinger/astropy,larrybradley/astropy,pllim/astropy,saimn/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,mhvk/astropy,astropy/astropy
|
Add test for polar motion warnings
|
from astropy.time import Time
from astropy.coordinates.builtin_frames.utils import get_polar_motion
from astropy.utils.exceptions import AstropyWarning
import pytest
def test_polar_motion_unsupported_dates():
msg = r'Tried to get polar motions for times {} IERS.*'
with pytest.warns(AstropyWarning, match=msg.format('before')):
get_polar_motion(Time('1900-01-01'))
with pytest.warns(AstropyWarning, match=msg.format('after')):
get_polar_motion(Time('2100-01-01'))
|
<commit_before><commit_msg>Add test for polar motion warnings<commit_after>
|
from astropy.time import Time
from astropy.coordinates.builtin_frames.utils import get_polar_motion
from astropy.utils.exceptions import AstropyWarning
import pytest
def test_polar_motion_unsupported_dates():
msg = r'Tried to get polar motions for times {} IERS.*'
with pytest.warns(AstropyWarning, match=msg.format('before')):
get_polar_motion(Time('1900-01-01'))
with pytest.warns(AstropyWarning, match=msg.format('after')):
get_polar_motion(Time('2100-01-01'))
|
Add test for polar motion warningsfrom astropy.time import Time
from astropy.coordinates.builtin_frames.utils import get_polar_motion
from astropy.utils.exceptions import AstropyWarning
import pytest
def test_polar_motion_unsupported_dates():
msg = r'Tried to get polar motions for times {} IERS.*'
with pytest.warns(AstropyWarning, match=msg.format('before')):
get_polar_motion(Time('1900-01-01'))
with pytest.warns(AstropyWarning, match=msg.format('after')):
get_polar_motion(Time('2100-01-01'))
|
<commit_before><commit_msg>Add test for polar motion warnings<commit_after>from astropy.time import Time
from astropy.coordinates.builtin_frames.utils import get_polar_motion
from astropy.utils.exceptions import AstropyWarning
import pytest
def test_polar_motion_unsupported_dates():
msg = r'Tried to get polar motions for times {} IERS.*'
with pytest.warns(AstropyWarning, match=msg.format('before')):
get_polar_motion(Time('1900-01-01'))
with pytest.warns(AstropyWarning, match=msg.format('after')):
get_polar_motion(Time('2100-01-01'))
|
|
c536a3f11230ecbb7db3fd1adc3dab585e083d0a
|
OPEN/macros/bls_gls.py
|
OPEN/macros/bls_gls.py
|
# In order to plot the GLS and BLS together 'run -i' this script:
magic = get_ipython().magic
magic('per obs') # to calculate GLS
from OPEN.periodograms import bls
default.per2 = bls(default) # calculate BLS and store it in the system
# for normalization
a1 = default.per2.power.max()
a2 = default.per.power.max()
from matplotlib.pylab import semilogx
semilogx(1./default.per.freq, default.per.power, 'b-', label='gls')
semilogx(1./default.per2.freq, default.per2.power/a1*a2, 'r-', label='bls')
|
Add 1st macro script; ability to run macros is here already.
|
Add 1st macro script; ability to run macros is here already.
The 'run' command from IPython already has the ability to work like
a macro reader/executor, with only simple changes to the script
files to allow for the OPEN magics. Don't forget the -i option to
expose the current namespace.
|
Python
|
mit
|
j-faria/OPEN,j-faria/OPEN
|
Add 1st macro script; ability to run macros is here already.
The 'run' command from IPython already has the ability to work like
a macro reader/executor, with only simple changes to the script
files to allow for the OPEN magics. Don't forget the -i option to
expose the current namespace.
|
# In order to plot the GLS and BLS together 'run -i' this script:
magic = get_ipython().magic
magic('per obs') # to calculate GLS
from OPEN.periodograms import bls
default.per2 = bls(default) # calculate BLS and store it in the system
# for normalization
a1 = default.per2.power.max()
a2 = default.per.power.max()
from matplotlib.pylab import semilogx
semilogx(1./default.per.freq, default.per.power, 'b-', label='gls')
semilogx(1./default.per2.freq, default.per2.power/a1*a2, 'r-', label='bls')
|
<commit_before><commit_msg>Add 1st macro script; ability to run macros is here already.
The 'run' command from IPython already has the ability to work like
a macro reader/executor, with only simple changes to the script
files to allow for the OPEN magics. Don't forget the -i option to
expose the current namespace.<commit_after>
|
# In order to plot the GLS and BLS together 'run -i' this script:
magic = get_ipython().magic
magic('per obs') # to calculate GLS
from OPEN.periodograms import bls
default.per2 = bls(default) # calculate BLS and store it in the system
# for normalization
a1 = default.per2.power.max()
a2 = default.per.power.max()
from matplotlib.pylab import semilogx
semilogx(1./default.per.freq, default.per.power, 'b-', label='gls')
semilogx(1./default.per2.freq, default.per2.power/a1*a2, 'r-', label='bls')
|
Add 1st macro script; ability to run macros is here already.
The 'run' command from IPython already has the ability to work like
a macro reader/executor, with only simple changes to the script
files to allow for the OPEN magics. Don't forget the -i option to
expose the current namespace.# In order to plot the GLS and BLS together 'run -i' this script:
magic = get_ipython().magic
magic('per obs') # to calculate GLS
from OPEN.periodograms import bls
default.per2 = bls(default) # calculate BLS and store it in the system
# for normalization
a1 = default.per2.power.max()
a2 = default.per.power.max()
from matplotlib.pylab import semilogx
semilogx(1./default.per.freq, default.per.power, 'b-', label='gls')
semilogx(1./default.per2.freq, default.per2.power/a1*a2, 'r-', label='bls')
|
<commit_before><commit_msg>Add 1st macro script; ability to run macros is here already.
The 'run' command from IPython already has the ability to work like
a macro reader/executor, with only simple changes to the script
files to allow for the OPEN magics. Don't forget the -i option to
expose the current namespace.<commit_after># In order to plot the GLS and BLS together 'run -i' this script:
magic = get_ipython().magic
magic('per obs') # to calculate GLS
from OPEN.periodograms import bls
default.per2 = bls(default) # calculate BLS and store it in the system
# for normalization
a1 = default.per2.power.max()
a2 = default.per.power.max()
from matplotlib.pylab import semilogx
semilogx(1./default.per.freq, default.per.power, 'b-', label='gls')
semilogx(1./default.per2.freq, default.per2.power/a1*a2, 'r-', label='bls')
|
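Since the message leans on IPython's run magic, here is a hedged one-liner showing the intended launch; the path comes from the record's file name, and a live OPEN session with `default` defined is assumed:

# Run from an OPEN/IPython session; -i shares the current namespace so
# `default` and the magics resolve inside the macro.
get_ipython().magic('run -i OPEN/macros/bls_gls.py')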
|
38e3cc382f2f626b82515f1d7b4e00a51462d4a3
|
conf_site/api/test/test_presentation.py
|
conf_site/api/test/test_presentation.py
|
import datetime
from django.core.urlresolvers import reverse
from rest_framework import status
from symposion.schedule.models import (
Presentation,
Slot,
SlotKind,
Section,
Schedule,
Day,
)
from symposion.proposals.models import ProposalKind
from symposion.speakers.models import Speaker, User
from conf_site.proposals.models import Proposal
from .base import TestBase
class TestSponsor(TestBase):
@classmethod
def setUpTestData(cls):
super(TestSponsor, cls).setUpTestData()
cls.speaker = Speaker.objects.create(
user=User.objects.create_user('test', 'test@pydata.org', 'test'),
name='test speaker',
)
cls.schedule = Schedule.objects.create(
section=Section.objects.first(),
)
cls.presentation = Presentation.objects.create(
slot=Slot.objects.create(
name='test slot',
day=Day.objects.create(
schedule=cls.schedule,
date=datetime.date.today(),
),
kind=SlotKind.objects.create(
schedule=cls.schedule,
label='45-min talk',
),
start=datetime.time(),
end=datetime.time(),
),
title='test presentation',
description='test description',
abstract='test abstract',
speaker=cls.speaker,
proposal_base=Proposal.objects.create(
kind=ProposalKind.objects.first(),
title='Test proposal',
description='lorem ipsum'*15,
abstract='lorem ipsum'*15,
speaker=cls.speaker,
audience_level=Proposal.AUDIENCE_LEVEL_NOVICE,
),
section=Section.objects.first(),
)
def test_presentation_list_api_anonymous_user(self):
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_list_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_presentation_detail_api_anonymous_user(self):
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_detail_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
Add tests for Presentation viewset.
|
Add tests for Presentation viewset.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
Add tests for Presentation viewset.
|
import datetime
from django.core.urlresolvers import reverse
from rest_framework import status
from symposion.schedule.models import (
Presentation,
Slot,
SlotKind,
Section,
Schedule,
Day,
)
from symposion.proposals.models import ProposalKind
from symposion.speakers.models import Speaker, User
from conf_site.proposals.models import Proposal
from .base import TestBase
class TestSponsor(TestBase):
@classmethod
def setUpTestData(cls):
super(TestSponsor, cls).setUpTestData()
cls.speaker = Speaker.objects.create(
user=User.objects.create_user('test', 'test@pydata.org', 'test'),
name='test speaker',
)
cls.schedule = Schedule.objects.create(
section=Section.objects.first(),
)
cls.presentation = Presentation.objects.create(
slot=Slot.objects.create(
name='test slot',
day=Day.objects.create(
schedule=cls.schedule,
date=datetime.date.today(),
),
kind=SlotKind.objects.create(
schedule=cls.schedule,
label='45-min talk',
),
start=datetime.time(),
end=datetime.time(),
),
title='test presentation',
description='test description',
abstract='test abstract',
speaker=cls.speaker,
proposal_base=Proposal.objects.create(
kind=ProposalKind.objects.first(),
title='Test proposal',
description='lorem ipsum'*15,
abstract='lorem ipsum'*15,
speaker=cls.speaker,
audience_level=Proposal.AUDIENCE_LEVEL_NOVICE,
),
section=Section.objects.first(),
)
def test_presentation_list_api_anonymous_user(self):
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_list_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_presentation_detail_api_anonymous_user(self):
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_detail_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
<commit_before><commit_msg>Add tests for Presentation viewset.<commit_after>
|
import datetime
from django.core.urlresolvers import reverse
from rest_framework import status
from symposion.schedule.models import (
Presentation,
Slot,
SlotKind,
Section,
Schedule,
Day,
)
from symposion.proposals.models import ProposalKind
from symposion.speakers.models import Speaker, User
from conf_site.proposals.models import Proposal
from .base import TestBase
class TestSponsor(TestBase):
@classmethod
def setUpTestData(cls):
super(TestSponsor, cls).setUpTestData()
cls.speaker = Speaker.objects.create(
user=User.objects.create_user('test', 'test@pydata.org', 'test'),
name='test speaker',
)
cls.schedule = Schedule.objects.create(
section=Section.objects.first(),
)
cls.presentation = Presentation.objects.create(
slot=Slot.objects.create(
name='test slot',
day=Day.objects.create(
schedule=cls.schedule,
date=datetime.date.today(),
),
kind=SlotKind.objects.create(
schedule=cls.schedule,
label='45-min talk',
),
start=datetime.time(),
end=datetime.time(),
),
title='test presentation',
description='test description',
abstract='test abstract',
speaker=cls.speaker,
proposal_base=Proposal.objects.create(
kind=ProposalKind.objects.first(),
title='Test proposal',
description='lorem ipsum'*15,
abstract='lorem ipsum'*15,
speaker=cls.speaker,
audience_level=Proposal.AUDIENCE_LEVEL_NOVICE,
),
section=Section.objects.first(),
)
def test_presentation_list_api_anonymous_user(self):
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_list_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_presentation_detail_api_anonymous_user(self):
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_detail_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
Add tests for Presentation viewset.import datetime
from django.core.urlresolvers import reverse
from rest_framework import status
from symposion.schedule.models import (
Presentation,
Slot,
SlotKind,
Section,
Schedule,
Day,
)
from symposion.proposals.models import ProposalKind
from symposion.speakers.models import Speaker, User
from conf_site.proposals.models import Proposal
from .base import TestBase
class TestSponsor(TestBase):
@classmethod
def setUpTestData(cls):
super(TestSponsor, cls).setUpTestData()
cls.speaker = Speaker.objects.create(
user=User.objects.create_user('test', 'test@pydata.org', 'test'),
name='test speaker',
)
cls.schedule = Schedule.objects.create(
section=Section.objects.first(),
)
cls.presentation = Presentation.objects.create(
slot=Slot.objects.create(
name='test slot',
day=Day.objects.create(
schedule=cls.schedule,
date=datetime.date.today(),
),
kind=SlotKind.objects.create(
schedule=cls.schedule,
label='45-min talk',
),
start=datetime.time(),
end=datetime.time(),
),
title='test presentation',
description='test description',
abstract='test abstract',
speaker=cls.speaker,
proposal_base=Proposal.objects.create(
kind=ProposalKind.objects.first(),
title='Test proposal',
description='lorem ipsum'*15,
abstract='lorem ipsum'*15,
speaker=cls.speaker,
audience_level=Proposal.AUDIENCE_LEVEL_NOVICE,
),
section=Section.objects.first(),
)
def test_presentation_list_api_anonymous_user(self):
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_list_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_presentation_detail_api_anonymous_user(self):
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_detail_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
<commit_before><commit_msg>Add tests for Presentation viewset.<commit_after>import datetime
from django.core.urlresolvers import reverse
from rest_framework import status
from symposion.schedule.models import (
Presentation,
Slot,
SlotKind,
Section,
Schedule,
Day,
)
from symposion.proposals.models import ProposalKind
from symposion.speakers.models import Speaker, User
from conf_site.proposals.models import Proposal
from .base import TestBase
class TestSponsor(TestBase):
@classmethod
def setUpTestData(cls):
super(TestSponsor, cls).setUpTestData()
cls.speaker = Speaker.objects.create(
user=User.objects.create_user('test', 'test@pydata.org', 'test'),
name='test speaker',
)
cls.schedule = Schedule.objects.create(
section=Section.objects.first(),
)
cls.presentation = Presentation.objects.create(
slot=Slot.objects.create(
name='test slot',
day=Day.objects.create(
schedule=cls.schedule,
date=datetime.date.today(),
),
kind=SlotKind.objects.create(
schedule=cls.schedule,
label='45-min talk',
),
start=datetime.time(),
end=datetime.time(),
),
title='test presentation',
description='test description',
abstract='test abstract',
speaker=cls.speaker,
proposal_base=Proposal.objects.create(
kind=ProposalKind.objects.first(),
title='Test proposal',
description='lorem ipsum'*15,
abstract='lorem ipsum'*15,
speaker=cls.speaker,
audience_level=Proposal.AUDIENCE_LEVEL_NOVICE,
),
section=Section.objects.first(),
)
def test_presentation_list_api_anonymous_user(self):
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_list_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(reverse('presentation-list'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_presentation_detail_api_anonymous_user(self):
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_presentation_detail_api_admin_user(self):
self.client.login(username='admin@pydata.org', password='admin')
response = self.client.get(
reverse('presentation-detail',args=[self.presentation.pk])
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
|
b992a2a8f9b8dd029319c2b037eee7ad529d06c1
|
contrib/firmware_packager/add_dfu_header.py
|
contrib/firmware_packager/add_dfu_header.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Richard Hughes <richard@hughsie.com>
#
# SPDX-License-Identifier: LGPL-2.1+
import struct
import zlib
import argparse
def main(bin_fn, dfu_fn, pad, vid, pid, rev):
# read binary file
with open(bin_fn, 'rb') as f:
blob = f.read()
# pad blob to a specific size
if pad:
while len(blob) < int(pad, 16):
blob += b'\0'
# create DFU footer with checksum
blob += struct.pack('<HHHH3sB',
int(rev, 16), # version
int(pid, 16), # PID
int(vid, 16), # VID
0x0100, # DFU version
b'UFD', # signature
0x10) # hdrlen
crc32 = zlib.crc32(blob) ^ 0xffffffff
blob += struct.pack('<L', crc32)
# write binary file
with open(dfu_fn, 'wb') as f:
f.write(blob)
if __name__ == "__main__":
# parse args
parser = argparse.ArgumentParser(description='Add DFU footer on firmware')
parser.add_argument('--bin', help='Path to the .bin file', required=True)
parser.add_argument('--dfu', help='Output DFU file path', required=True)
parser.add_argument('--pad', help='Pad to a specific size, e.g. 0x4000', default=None)
parser.add_argument('--vid', help='Vendor ID, e.g. 0x273f', required=True)
parser.add_argument('--pid', help='Product ID, e.g. 0x1002', required=True)
parser.add_argument('--rev', help='Revision, e.g. 0x1000', required=True)
args = parser.parse_args()
main(args.bin, args.dfu, args.pad, args.vid, args.pid, args.rev)
|
Add a helper script to add a DFU header
|
Add a helper script to add a DFU header
This is so simple we can do it in 10 lines of Python. It matches the format of
the `add_capsule_header.py` utility.
|
Python
|
lgpl-2.1
|
fwupd/fwupd,fwupd/fwupd,hughsie/fwupd,fwupd/fwupd,hughsie/fwupd,fwupd/fwupd,hughsie/fwupd,hughsie/fwupd
|
Add a helper script to add a DFU header
This is so simple we can do it in 10 lines of Python. It matches the format of
the `add_capsule_header.py` utility.
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Richard Hughes <richard@hughsie.com>
#
# SPDX-License-Identifier: LGPL-2.1+
import struct
import zlib
import argparse
def main(bin_fn, dfu_fn, pad, vid, pid, rev):
# read binary file
with open(bin_fn, 'rb') as f:
blob = f.read()
# pad blob to a specific size
if pad:
while len(blob) < int(pad, 16):
blob += b'\0'
# create DFU footer with checksum
blob += struct.pack('<HHHH3sB',
int(rev, 16), # version
int(pid, 16), # PID
int(vid, 16), # VID
0x0100, # DFU version
b'UFD', # signature
0x10) # hdrlen
crc32 = zlib.crc32(blob) ^ 0xffffffff
blob += struct.pack('<L', crc32)
# write binary file
with open(dfu_fn, 'wb') as f:
f.write(blob)
if __name__ == "__main__":
# parse args
parser = argparse.ArgumentParser(description='Add DFU footer on firmware')
parser.add_argument('--bin', help='Path to the .bin file', required=True)
parser.add_argument('--dfu', help='Output DFU file path', required=True)
parser.add_argument('--pad', help='Pad to a specific size, e.g. 0x4000', default=None)
parser.add_argument('--vid', help='Vendor ID, e.g. 0x273f', required=True)
parser.add_argument('--pid', help='Product ID, e.g. 0x1002', required=True)
parser.add_argument('--rev', help='Revision, e.g. 0x1000', required=True)
args = parser.parse_args()
main(args.bin, args.dfu, args.pad, args.vid, args.pid, args.rev)
|
<commit_before><commit_msg>Add a helper script to add a DFU header
This is so simple we can do it in 10 lines of Python. It matches the format of
the `add_capsule_header.py` utility.<commit_after>
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Richard Hughes <richard@hughsie.com>
#
# SPDX-License-Identifier: LGPL-2.1+
import struct
import zlib
import argparse
def main(bin_fn, dfu_fn, pad, vid, pid, rev):
# read binary file
with open(bin_fn, 'rb') as f:
blob = f.read()
# pad blob to a specific size
if pad:
while len(blob) < int(pad, 16):
blob += b'\0'
# create DFU footer with checksum
blob += struct.pack('<HHHH3sB',
int(rev, 16), # version
int(pid, 16), # PID
int(vid, 16), # VID
0x0100, # DFU version
b'UFD', # signature
0x10) # hdrlen
crc32 = zlib.crc32(blob) ^ 0xffffffff
blob += struct.pack('<L', crc32)
# write binary file
with open(dfu_fn, 'wb') as f:
f.write(blob)
if __name__ == "__main__":
# parse args
parser = argparse.ArgumentParser(description='Add DFU footer on firmware')
parser.add_argument('--bin', help='Path to the .bin file', required=True)
parser.add_argument('--dfu', help='Output DFU file path', required=True)
parser.add_argument('--pad', help='Pad to a specific size, e.g. 0x4000', default=None)
parser.add_argument('--vid', help='Vendor ID, e.g. 0x273f', required=True)
parser.add_argument('--pid', help='Product ID, e.g. 0x1002', required=True)
parser.add_argument('--rev', help='Revision, e.g. 0x1000', required=True)
args = parser.parse_args()
main(args.bin, args.dfu, args.pad, args.vid, args.pid, args.rev)
|
Add a helper script to add a DFU header
This is so simple we can do it in 10 lines of Python. It matches the format of
the `add_capsule_header.py` utility.#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Richard Hughes <richard@hughsie.com>
#
# SPDX-License-Identifier: LGPL-2.1+
import struct
import zlib
import argparse
def main(bin_fn, dfu_fn, pad, vid, pid, rev):
# read binary file
with open(bin_fn, 'rb') as f:
blob = f.read()
# pad blob to a specific size
if pad:
while len(blob) < int(pad, 16):
blob += b'\0'
# create DFU footer with checksum
blob += struct.pack('<HHHH3sB',
int(rev, 16), # version
int(pid, 16), # PID
int(vid, 16), # VID
0x0100, # DFU version
b'UFD', # signature
0x10) # hdrlen
crc32 = zlib.crc32(blob) ^ 0xffffffff
blob += struct.pack('<L', crc32)
# write binary file
with open(dfu_fn, 'wb') as f:
f.write(blob)
if __name__ == "__main__":
# parse args
parser = argparse.ArgumentParser(description='Add DFU footer on firmware')
parser.add_argument('--bin', help='Path to the .bin file', required=True)
parser.add_argument('--dfu', help='Output DFU file path', required=True)
parser.add_argument('--pad', help='Pad to a specific size, e.g. 0x4000', default=None)
parser.add_argument('--vid', help='Vendor ID, e.g. 0x273f', required=True)
parser.add_argument('--pid', help='Product ID, e.g. 0x1002', required=True)
parser.add_argument('--rev', help='Revision, e.g. 0x1000', required=True)
args = parser.parse_args()
main(args.bin, args.dfu, args.pad, args.vid, args.pid, args.rev)
|
<commit_before><commit_msg>Add a helper script to add a DFU header
This is so simple we can do it in 10 lines of Python. It matches the format of
the `add_capsule_header.py` utility.<commit_after>#!/usr/bin/python3
# -*- coding: utf-8 -*-
#
# Copyright (C) 2020 Richard Hughes <richard@hughsie.com>
#
# SPDX-License-Identifier: LGPL-2.1+
import struct
import zlib
import argparse
def main(bin_fn, dfu_fn, pad, vid, pid, rev):
# read binary file
with open(bin_fn, 'rb') as f:
blob = f.read()
# pad blob to a specific size
if pad:
while len(blob) < int(pad, 16):
blob += b'\0'
# create DFU footer with checksum
blob += struct.pack('<HHHH3sB',
int(rev, 16), # version
int(pid, 16), # PID
int(vid, 16), # VID
0x0100, # DFU version
b'UFD', # signature
0x10) # hdrlen
crc32 = zlib.crc32(blob) ^ 0xffffffff
blob += struct.pack('<L', crc32)
# write binary file
with open(dfu_fn, 'wb') as f:
f.write(blob)
if __name__ == "__main__":
# parse args
parser = argparse.ArgumentParser(description='Add DFU footer on firmware')
parser.add_argument('--bin', help='Path to the .bin file', required=True)
parser.add_argument('--dfu', help='Output DFU file path', required=True)
parser.add_argument('--pad', help='Pad to a specific size, e.g. 0x4000', default=None)
parser.add_argument('--vid', help='Vendor ID, e.g. 0x273f', required=True)
parser.add_argument('--pid', help='Product ID, e.g. 0x1002', required=True)
parser.add_argument('--rev', help='Revision, e.g. 0x1000', required=True)
args = parser.parse_args()
main(args.bin, args.dfu, args.pad, args.vid, args.pid, args.rev)
|
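A hedged example of driving the helper without its CLI; the file names are hypothetical, while the hex values are taken from the script's own --help strings:

# Direct call with keyword arguments matching main()'s signature,
# assuming firmware.bin exists alongside.
main('firmware.bin', 'firmware.dfu',
     pad='0x4000', vid='0x273f', pid='0x1002', rev='0x1000')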
|
1d4f747844ed62550c7a1ee22699845ef8dfa925
|
problem_32.py
|
problem_32.py
|
from time import time
DIGITS = 9
def calculate_pandigitals():
pandigitals = set()
for multiplicand in range(2, 99):
for multiplier in range(123, 9876/multiplicand):
product = multiplicand*multiplier
identity = str(multiplicand) + str(multiplier) + str(product)
if '0' not in identity and len(identity) == len(set(identity)) == DIGITS:
pandigitals.add(product)
return pandigitals
t = time()
pandigitals = calculate_pandigitals()
print 'Sum:', sum(pandigitals)
print 'Time:', time() - t
|
Add problem 32, pandigital products
|
Add problem 32, pandigital products
|
Python
|
mit
|
dimkarakostas/project-euler
|
Add problem 32, pandigital products
|
from time import time
DIGITS = 9
def calculate_pandigitals():
pandigitals = set()
for multiplicand in range(2, 99):
for multiplier in range(123, 9876/multiplicand):
product = multiplicand*multiplier
identity = str(multiplicand) + str(multiplier) + str(product)
if '0' not in identity and len(identity) == len(set(identity)) == DIGITS:
pandigitals.add(product)
return pandigitals
t = time()
pandigitals = calculate_pandigitals()
print 'Sum:', sum(pandigitals)
print 'Time:', time() - t
|
<commit_before><commit_msg>Add problem 32, pandigital products<commit_after>
|
from time import time
DIGITS = 9
def calculate_pandigitals():
pandigitals = set()
for multiplicand in range(2, 99):
for multiplier in range(123, 9876/multiplicand):
product = multiplicand*multiplier
identity = str(multiplicand) + str(multiplier) + str(product)
if '0' not in identity and len(identity) == len(set(identity)) == DIGITS:
pandigitals.add(product)
return pandigitals
t = time()
pandigitals = calculate_pandigitals()
print 'Sum:', sum(pandigitals)
print 'Time:', time() - t
|
Add problem 32, pandigital productsfrom time import time
DIGITS = 9
def calculate_pandigitals():
pandigitals = set()
for multiplicand in range(2, 99):
for multiplier in range(123, 9876/multiplicand):
product = multiplicand*multiplier
identity = str(multiplicand) + str(multiplier) + str(product)
if '0' not in identity and len(identity) == len(set(identity)) == DIGITS:
pandigitals.add(product)
return pandigitals
t = time()
pandigitals = calculate_pandigitals()
print 'Sum:', sum(pandigitals)
print 'Time:', time() - t
|
<commit_before><commit_msg>Add problem 32, pandigital products<commit_after>from time import time
DIGITS = 9
def calculate_pandigitals():
pandigitals = set()
for multiplicand in range(2, 99):
for multiplier in range(123, 9876/multiplicand):
product = multiplicand*multiplier
identity = str(multiplicand) + str(multiplier) + str(product)
if '0' not in identity and len(identity) == len(set(identity)) == DIGITS:
pandigitals.add(product)
return pandigitals
t = time()
pandigitals = calculate_pandigitals()
print 'Sum:', sum(pandigitals)
print 'Time:', time() - t
|
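A quick sanity check of the identity filter above, using the worked example from the Project Euler problem statement (39 x 186 = 7254 uses each digit 1 through 9 exactly once):

# Mirrors the pandigital test inside calculate_pandigitals().
identity = '39' + '186' + str(39 * 186)  # '391867254'
assert '0' not in identity and len(identity) == len(set(identity)) == 9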
|
8265d7f3dc9c6243ba338e7fc4c52117fd7340fd
|
qllbotnew.py
|
qllbotnew.py
|
import select
import socket
import time
HOST, PORT = 'localhost', 5000
class Bot(object):
timeout = 1
watchdog_threshold = 120
def __init__(self, host, port, max_retries=5):
self.host = host
self.port = port
self.max_retries = max_retries
self.socket = None
def connect(self):
""" Connect to a server. Retry if it is not available. """
connected = False
retries = 0
while not connected:
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect((self.host, self.port))
except ConnectionRefusedError:
if retries >= self.max_retries:
print('No connection possible, sorry :(')
raise
time_sleep = (2 ** retries) * 5
print('Connection refused, retrying in %ds.' % time_sleep)
time.sleep(time_sleep)
retries += 1
else:
connected = True
def reconnect(self):
""" Reconnect to a server with host and port. """
self.disconnect()
self.connect()
def disconnect(self):
""" Disconnect from a server if a connection is open. """
if self.socket:
self.socket.close()
def data_available(self):
""" Checks if data is available on the socket w/ a timeout. """
rlist, _, __ = select.select([self.socket], [], [], self.timeout)
return self.socket in rlist
def send(self, msg):
self.socket.sendall(msg.encode('utf-8', 'replace') + b'\r\n')
def loop(self):
self.connect()
watchdog_counter = 0
sent_ping = False
while True:
if self.data_available():
data = self.socket.recv(4096)
if not data:
self.reconnect()
watchdog_counter += 1
if watchdog_counter > self.watchdog_threshold:
if sent_ping:
# server answered slower than watchdog_threshold seconds
self.reconnect()
sent_ping = False
else:
self.send('PING :fuugg')
sent_ping = True
watchdog_counter = 0
class IRC(object):
def __init__(self):
pass
if __name__ == '__main__':
bot = Bot(HOST, PORT)
bot.loop()
|
Add temporary testing script for qllbot v2.
|
Add temporary testing script for qllbot v2.
|
Python
|
bsd-2-clause
|
sqall01/qllbot,Javex/qllbot,qll/qllbot
|
Add temporary testing script for qllbot v2.
|
import select
import socket
import time
HOST, PORT = 'localhost', 5000
class Bot(object):
timeout = 1
watchdog_threshold = 120
def __init__(self, host, port, max_retries=5):
self.host = host
self.port = port
self.max_retries = max_retries
self.socket = None
def connect(self):
""" Connect to a server. Retry if it is not available. """
connected = False
retries = 0
while not connected:
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect((self.host, self.port))
except ConnectionRefusedError:
if retries >= self.max_retries:
print('No connection possible, sorry :(')
raise
time_sleep = (2 ** retries) * 5
print('Connection refused, retrying in %ds.' % time_sleep)
time.sleep(time_sleep)
retries += 1
else:
connected = True
def reconnect(self):
""" Reconnect to a server with host and port. """
self.disconnect()
self.connect()
def disconnect(self):
""" Disconnect from a server if a connection is open. """
if self.socket:
self.socket.close()
def data_available(self):
""" Checks if data is available on the socket w/ a timeout. """
rlist, _, __ = select.select([self.socket], [], [], self.timeout)
return self.socket in rlist
def send(self, msg):
self.socket.sendall(msg.encode('utf-8', 'replace') + b'\r\n')
def loop(self):
self.connect()
watchdog_counter = 0
sent_ping = False
while True:
if self.data_available():
data = self.socket.recv(4096)
if not data:
self.reconnect()
watchdog_counter += 1
if watchdog_counter > self.watchdog_threshold:
if sent_ping:
# server answered slower than watchdog_threshold seconds
self.reconnect()
sent_ping = False
else:
self.send('PING :fuugg')
sent_ping = True
watchdog_counter = 0
class IRC(object):
def __init__(self):
pass
if __name__ == '__main__':
bot = Bot(HOST, PORT)
bot.loop()
|
<commit_before><commit_msg>Add temporary testing script for qllbot v2.<commit_after>
|
import select
import socket
import time
HOST, PORT = 'localhost', 5000
class Bot(object):
timeout = 1
watchdog_threshold = 120
def __init__(self, host, port, max_retries=5):
self.host = host
self.port = port
self.max_retries = max_retries
self.socket = None
def connect(self):
""" Connect to a server. Retry if it is not available. """
connected = False
retries = 0
while not connected:
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect((self.host, self.port))
except ConnectionRefusedError:
if retries >= self.max_retries:
print('No connection possible, sorry :(')
raise
time_sleep = (2 ** retries) * 5
print('Connection refused, retrying in %ds.' % time_sleep)
time.sleep(time_sleep)
retries += 1
else:
connected = True
def reconnect(self):
""" Reconnect to a server with host and port. """
self.disconnect()
self.connect()
def disconnect(self):
""" Disconnect from a server if a connection is open. """
if self.socket:
self.socket.close()
def data_available(self):
""" Checks if data is available on the socket w/ a timeout. """
rlist, _, __ = select.select([self.socket], [], [], self.timeout)
return self.socket in rlist
def send(self, msg):
self.socket.sendall(msg.encode('utf-8', 'replace') + b'\r\n')
def loop(self):
self.connect()
watchdog_counter = 0
sent_ping = False
while True:
if self.data_available():
data = self.socket.recv(4096)
if not data:
self.reconnect()
watchdog_counter += 1
if watchdog_counter > self.watchdog_threshold:
if sent_ping:
# server answered slower than watchdog_threshold seconds
self.reconnect()
sent_ping = False
else:
self.send('PING :fuugg')
sent_ping = True
watchdog_counter = 0
class IRC(object):
def __init__(self):
pass
if __name__ == '__main__':
bot = Bot(HOST, PORT)
bot.loop()
|
Add temporary testing script for qllbot v2.import select
import socket
import time
HOST, PORT = 'localhost', 5000
class Bot(object):
timeout = 1
watchdog_threshold = 120
def __init__(self, host, port, max_retries=5):
self.host = host
self.port = port
self.max_retries = max_retries
self.socket = None
def connect(self):
""" Connect to a server. Retry if it is not available. """
connected = False
retries = 0
while not connected:
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect((self.host, self.port))
except ConnectionRefusedError:
if retries >= self.max_retries:
print('No connection possible, sorry :(')
raise
time_sleep = (2 ** retries) * 5
print('Connection refused, retrying in %ds.' % time_sleep)
time.sleep(time_sleep)
retries += 1
else:
connected = True
def reconnect(self):
""" Reconnect to a server with host and port. """
self.disconnect()
self.connect()
def disconnect(self):
""" Disconnect from a server if a connection is open. """
if self.socket:
self.socket.close()
def data_available(self):
""" Checks if data is available on the socket w/ a timeout. """
rlist, _, __ = select.select([self.socket], [], [], self.timeout)
return self.socket in rlist
def send(self, msg):
self.socket.sendall(msg.encode('utf-8', 'replace') + b'\r\n')
def loop(self):
self.connect()
watchdog_counter = 0
sent_ping = False
while True:
if self.data_available():
data = self.socket.recv(4096)
if not data:
self.reconnect()
watchdog_counter += 1
if watchdog_counter > self.watchdog_threshold:
if sent_ping:
# server answered slower than watchdog_threshold seconds
self.reconnect()
sent_ping = False
else:
self.send('PING :fuugg')
sent_ping = True
watchdog_counter = 0
class IRC(object):
def __init__(self):
pass
if __name__ == '__main__':
bot = Bot(HOST, PORT)
bot.loop()
|
<commit_before><commit_msg>Add temporary testing script for qllbot v2.<commit_after>import select
import socket
import time
HOST, PORT = 'localhost', 5000
class Bot(object):
timeout = 1
watchdog_threshold = 120
def __init__(self, host, port, max_retries=5):
self.host = host
self.port = port
self.max_retries = max_retries
self.socket = None
def connect(self):
""" Connect to a server. Retry if it is not available. """
connected = False
retries = 0
while not connected:
try:
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.connect((self.host, self.port))
except ConnectionRefusedError:
if retries >= self.max_retries:
print('No connection possible, sorry :(')
raise
time_sleep = (2 ** retries) * 5
print('Connection refused, retrying in %ds.' % time_sleep)
time.sleep(time_sleep)
retries += 1
else:
connected = True
def reconnect(self):
""" Reconnect to a server with host and port. """
self.disconnect()
self.connect()
def disconnect(self):
""" Disconnect from a server if a connection is open. """
if self.socket:
self.socket.close()
def data_available(self):
""" Checks if data is available on the socket w/ a timeout. """
rlist, _, __ = select.select([self.socket], [], [], self.timeout)
return self.socket in rlist
def send(self, msg):
self.socket.sendall(msg.encode('utf-8', 'replace') + b'\r\n')
def loop(self):
self.connect()
watchdog_counter = 0
sent_ping = False
while True:
if self.data_available():
data = self.socket.recv(4096)
if not data:
self.reconnect()
watchdog_counter += 1
if watchdog_counter > self.watchdog_threshold:
if sent_ping:
# server answered slower than watchdog_threshold seconds
self.reconnect()
sent_ping = False
else:
self.send('PING :fuugg')
sent_ping = True
watchdog_counter = 0
class IRC(object):
def __init__(self):
pass
if __name__ == '__main__':
bot = Bot(HOST, PORT)
bot.loop()
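A minimal sketch of the backoff schedule that connect() above produces, assuming the default max_retries=5; the loop only reproduces the arithmetic and opens no sockets:

for retries in range(5):
    print('retry %d would sleep %ds' % (retries + 1, (2 ** retries) * 5))
# prints 5, 10, 20, 40, 80 -- after the fifth refusal connect() re-raises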
|
|
4b7f008c691d14894ea17c3263c7c68fab66f246
|
blackjack/__init__.py
|
blackjack/__init__.py
|
"""The init file for the blackjack package."""
__author__ = 'Christopher Randall Wells'
__copyright__ = 'Copyright 2015 Christopher Randall Wells'
__license__ = 'MIT'
__title__ = 'blackjack'
__version__ = '0.1'
|
Add basic package directory and init file
|
Add basic package directory and init file
Add a basic package directory containing a simple __init__.py file to define the elements of the package.
|
Python
|
mit
|
ExcaliburZero/blackjack
|
Add basic package directory and init file
Add a basic package directory containing a simple __init__.py file to define the elements of the package.
|
"""The init file for the blackjack package."""
__author__ = 'Christopher Randall Wells'
__copyright__ = 'Copyright 2015 Christopher Randall Wells'
__license__ = 'MIT'
__title__ = 'blackjack'
__version__ = '0.1'
|
<commit_before><commit_msg>Add basic package directory and init file
Add a basic package directory containing a simple __init__.py file to define the elements of the package.<commit_after>
|
"""The init file for the blackjack package."""
__author__ = 'Christopher Randall Wells'
__copyright__ = 'Copyright 2015 Christopher Randall Wells'
__license__ = 'MIT'
__title__ = 'blackjack'
__version__ = '0.1'
|
Add basic package directory and init file
Add a basic package directory containing a simple __init__.py file to define the elements of the package."""The init file for the blackjack package."""
__author__ = 'Christopher Randall Wells'
__copyright__ = 'Copyright 2015 Christopher Randall Wells'
__license__ = 'MIT'
__title__ = 'blackjack'
__version__ = '0.1'
|
<commit_before><commit_msg>Add basic package directory and init file
Add a basic package directory containing a simple __init__.py file to define the elements of the package.<commit_after>"""The init file for the blackjack package."""
__author__ = 'Christopher Randall Wells'
__copyright__ = 'Copyright 2015 Christopher Randall Wells'
__license__ = 'MIT'
__title__ = 'blackjack'
__version__ = '0.1'
|
|
25c6fc731be491aa144867995b7b9d5f646414f2
|
tests/basics/builtin_delattr.py
|
tests/basics/builtin_delattr.py
|
# test builtin delattr
class A: pass
a = A()
a.x = 1
print(a.x)
delattr(a, 'x')
try:
a.x
except AttributeError:
print('AttributeError')
try:
delattr(a, 'x')
except AttributeError:
print('AttributeError')
|
Add test for builtin "delattr".
|
tests/basics: Add test for builtin "delattr".
|
Python
|
mit
|
swegener/micropython,ryannathans/micropython,PappaPeppar/micropython,MrSurly/micropython,MrSurly/micropython,hosaka/micropython,tobbad/micropython,matthewelse/micropython,cwyark/micropython,Peetz0r/micropython-esp32,mhoffma/micropython,puuu/micropython,selste/micropython,puuu/micropython,deshipu/micropython,blazewicz/micropython,kerneltask/micropython,dmazzella/micropython,alex-march/micropython,Peetz0r/micropython-esp32,tralamazza/micropython,pozetroninc/micropython,kerneltask/micropython,alex-robbins/micropython,ryannathans/micropython,lowRISC/micropython,tralamazza/micropython,adafruit/micropython,SHA2017-badge/micropython-esp32,bvernoux/micropython,adafruit/micropython,mhoffma/micropython,hosaka/micropython,micropython/micropython-esp32,MrSurly/micropython,hosaka/micropython,mhoffma/micropython,swegener/micropython,SHA2017-badge/micropython-esp32,hiway/micropython,dxxb/micropython,Timmenem/micropython,MrSurly/micropython-esp32,tobbad/micropython,deshipu/micropython,oopy/micropython,alex-robbins/micropython,jmarcelino/pycom-micropython,dxxb/micropython,selste/micropython,micropython/micropython-esp32,torwag/micropython,chrisdearman/micropython,pfalcon/micropython,Peetz0r/micropython-esp32,torwag/micropython,TDAbboud/micropython,Timmenem/micropython,blazewicz/micropython,deshipu/micropython,AriZuu/micropython,cwyark/micropython,toolmacher/micropython,toolmacher/micropython,AriZuu/micropython,mhoffma/micropython,TDAbboud/micropython,swegener/micropython,alex-march/micropython,torwag/micropython,dxxb/micropython,AriZuu/micropython,hosaka/micropython,hiway/micropython,hosaka/micropython,dmazzella/micropython,tobbad/micropython,lowRISC/micropython,pozetroninc/micropython,ryannathans/micropython,tobbad/micropython,bvernoux/micropython,jmarcelino/pycom-micropython,pozetroninc/micropython,adafruit/circuitpython,pfalcon/micropython,henriknelson/micropython,torwag/micropython,bvernoux/micropython,matthewelse/micropython,tuc-osg/micropython,SHA2017-badge/micropython-esp32,blazewicz/micropython,HenrikSolver/micropython,micropython/micropython-esp32,adafruit/circuitpython,ryannathans/micropython,puuu/micropython,MrSurly/micropython-esp32,HenrikSolver/micropython,kerneltask/micropython,cwyark/micropython,adafruit/micropython,henriknelson/micropython,oopy/micropython,hiway/micropython,matthewelse/micropython,alex-robbins/micropython,selste/micropython,PappaPeppar/micropython,bvernoux/micropython,Peetz0r/micropython-esp32,infinnovation/micropython,oopy/micropython,chrisdearman/micropython,MrSurly/micropython-esp32,kerneltask/micropython,blazewicz/micropython,infinnovation/micropython,adafruit/circuitpython,pramasoul/micropython,hiway/micropython,henriknelson/micropython,lowRISC/micropython,TDAbboud/micropython,pfalcon/micropython,mhoffma/micropython,toolmacher/micropython,alex-march/micropython,dmazzella/micropython,adafruit/micropython,tuc-osg/micropython,alex-march/micropython,PappaPeppar/micropython,swegener/micropython,pfalcon/micropython,PappaPeppar/micropython,alex-march/micropython,cwyark/micropython,dxxb/micropython,MrSurly/micropython-esp32,henriknelson/micropython,jmarcelino/pycom-micropython,toolmacher/micropython,PappaPeppar/micropython,swegener/micropython,tralamazza/micropython,trezor/micropython,tuc-osg/micropython,Timmenem/micropython,torwag/micropython,henriknelson/micropython,MrSurly/micropython,matthewelse/micropython,AriZuu/micropython,micropython/micropython-esp32,AriZuu/micropython,selste/micropython,lowRISC/micropython,chrisdearman/micropython,hiway/micropython,matthewelse/micropy
thon,tobbad/micropython,jmarcelino/pycom-micropython,cwyark/micropython,tuc-osg/micropython,oopy/micropython,tuc-osg/micropython,adafruit/circuitpython,alex-robbins/micropython,HenrikSolver/micropython,pramasoul/micropython,dmazzella/micropython,toolmacher/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,alex-robbins/micropython,trezor/micropython,puuu/micropython,TDAbboud/micropython,MrSurly/micropython,adafruit/circuitpython,bvernoux/micropython,pfalcon/micropython,TDAbboud/micropython,chrisdearman/micropython,oopy/micropython,MrSurly/micropython-esp32,SHA2017-badge/micropython-esp32,adafruit/circuitpython,deshipu/micropython,trezor/micropython,HenrikSolver/micropython,selste/micropython,infinnovation/micropython,lowRISC/micropython,pramasoul/micropython,Timmenem/micropython,ryannathans/micropython,infinnovation/micropython,trezor/micropython,dxxb/micropython,pramasoul/micropython,adafruit/micropython,deshipu/micropython,pramasoul/micropython,jmarcelino/pycom-micropython,tralamazza/micropython,infinnovation/micropython,pozetroninc/micropython,Timmenem/micropython,kerneltask/micropython,blazewicz/micropython,micropython/micropython-esp32,HenrikSolver/micropython,trezor/micropython,chrisdearman/micropython,Peetz0r/micropython-esp32,pozetroninc/micropython,matthewelse/micropython
|
tests/basics: Add test for builtin "delattr".
|
# test builtin delattr
class A: pass
a = A()
a.x = 1
print(a.x)
delattr(a, 'x')
try:
a.x
except AttributeError:
print('AttributeError')
try:
delattr(a, 'x')
except AttributeError:
print('AttributeError')
|
<commit_before><commit_msg>tests/basics: Add test for builtin "delattr".<commit_after>
|
# test builtin delattr
class A: pass
a = A()
a.x = 1
print(a.x)
delattr(a, 'x')
try:
a.x
except AttributeError:
print('AttributeError')
try:
delattr(a, 'x')
except AttributeError:
print('AttributeError')
|
tests/basics: Add test for builtin "delattr".# test builtin delattr
class A: pass
a = A()
a.x = 1
print(a.x)
delattr(a, 'x')
try:
a.x
except AttributeError:
print('AttributeError')
try:
delattr(a, 'x')
except AttributeError:
print('AttributeError')
|
<commit_before><commit_msg>tests/basics: Add test for builtin "delattr".<commit_after># test builtin delattr
class A: pass
a = A()
a.x = 1
print(a.x)
delattr(a, 'x')
try:
a.x
except AttributeError:
print('AttributeError')
try:
delattr(a, 'x')
except AttributeError:
print('AttributeError')
|
|
bafa7a030192549f19de886dc9bc251f59668f5e
|
tests/formats_test/swap_test.py
|
tests/formats_test/swap_test.py
|
import test_compat # pylint: disable=unused-import
import six
import unittest
from blivet.devices.storage import StorageDevice
from blivet.errors import DeviceError
from blivet.formats import get_format
from blivet.size import Size
class SwapNodevTestCase(unittest.TestCase):
def test_swap_max_size(self):
StorageDevice("dev", size=Size("129 GiB"),
fmt=get_format("swap"))
StorageDevice("dev", size=Size("15 TiB"),
fmt=get_format("swap"))
with six.assertRaisesRegex(self, DeviceError, "device is too large for new format"):
StorageDevice("dev", size=Size("17 TiB"),
fmt=get_format("swap"))
|
Add test for SwapSpace max size
|
Add test for SwapSpace max size
|
Python
|
lgpl-2.1
|
vojtechtrefny/blivet,rvykydal/blivet,jkonecny12/blivet,rvykydal/blivet,vojtechtrefny/blivet,jkonecny12/blivet
|
Add test for SwapSpace max size
|
import test_compat # pylint: disable=unused-import
import six
import unittest
from blivet.devices.storage import StorageDevice
from blivet.errors import DeviceError
from blivet.formats import get_format
from blivet.size import Size
class SwapNodevTestCase(unittest.TestCase):
def test_swap_max_size(self):
StorageDevice("dev", size=Size("129 GiB"),
fmt=get_format("swap"))
StorageDevice("dev", size=Size("15 TiB"),
fmt=get_format("swap"))
with six.assertRaisesRegex(self, DeviceError, "device is too large for new format"):
StorageDevice("dev", size=Size("17 TiB"),
fmt=get_format("swap"))
|
<commit_before><commit_msg>Add test for SwapSpace max size<commit_after>
|
import test_compat # pylint: disable=unused-import
import six
import unittest
from blivet.devices.storage import StorageDevice
from blivet.errors import DeviceError
from blivet.formats import get_format
from blivet.size import Size
class SwapNodevTestCase(unittest.TestCase):
def test_swap_max_size(self):
StorageDevice("dev", size=Size("129 GiB"),
fmt=get_format("swap"))
StorageDevice("dev", size=Size("15 TiB"),
fmt=get_format("swap"))
with six.assertRaisesRegex(self, DeviceError, "device is too large for new format"):
StorageDevice("dev", size=Size("17 TiB"),
fmt=get_format("swap"))
|
Add test for SwapSpace max sizeimport test_compat # pylint: disable=unused-import
import six
import unittest
from blivet.devices.storage import StorageDevice
from blivet.errors import DeviceError
from blivet.formats import get_format
from blivet.size import Size
class SwapNodevTestCase(unittest.TestCase):
def test_swap_max_size(self):
StorageDevice("dev", size=Size("129 GiB"),
fmt=get_format("swap"))
StorageDevice("dev", size=Size("15 TiB"),
fmt=get_format("swap"))
with six.assertRaisesRegex(self, DeviceError, "device is too large for new format"):
StorageDevice("dev", size=Size("17 TiB"),
fmt=get_format("swap"))
|
<commit_before><commit_msg>Add test for SwapSpace max size<commit_after>import test_compat # pylint: disable=unused-import
import six
import unittest
from blivet.devices.storage import StorageDevice
from blivet.errors import DeviceError
from blivet.formats import get_format
from blivet.size import Size
class SwapNodevTestCase(unittest.TestCase):
def test_swap_max_size(self):
StorageDevice("dev", size=Size("129 GiB"),
fmt=get_format("swap"))
StorageDevice("dev", size=Size("15 TiB"),
fmt=get_format("swap"))
with six.assertRaisesRegex(self, DeviceError, "device is too large for new format"):
StorageDevice("dev", size=Size("17 TiB"),
fmt=get_format("swap"))
|
|
2575a39d29b904fe32dcf45333c8429bc92085ea
|
tastypie_search/signals.py
|
tastypie_search/signals.py
|
""" Additional Haystack Signal Processors
This module provides additional HAYSTACK_SIGNAL_PROCESSOR classes to reindex more efficiently.
They may be tied to various other haystack-related libraries.
They are not tied to `tastypie_search` in any way! Use them anywhere Haystack is installed.
"""
import sys
from django.conf import settings
# TODO: Using "importlib" requires py2.7; find another way.
import inspect
import importlib
from haystack.indexes import SearchIndex
from queued_search.signals import QueuedSignalProcessor
import logging
logger = logging.getLogger(__name__)
def indexed_models():
""" Use introspection to find haystack's indexed models.
These models need to trigger `update_index` when the ORM changes them.
Each application in settings.INSTALLED_APPS must have the module:
import app.search_indexes
... and there must be classes derived from:
import haystack.indexes.SearchIndex
"""
models_list = []
for app in settings.INSTALLED_APPS:
try:
# find app.search_indexes modules in each app...
search_indexes = importlib.import_module('%s.search_indexes' % app)
search_indexes = [ m for n, m in inspect.getmembers(search_indexes, inspect.isclass) if issubclass(m, SearchIndex) ]
logger.debug("%s.search_indexes: %r" % (app, search_indexes))
models_list = [ m().get_model() for m in search_indexes ]
logger.debug("Indexed Models: %r" % models_list)
except ImportError as e:
pass
return models_list
class ModelCheckingQueuedSignalProcessor(QueuedSignalProcessor):
""" This signal processor tells Haystack to queue up only
the models that have full text indexes.
Declare this signal processor class in the django.settings as a full path.
HAYSTACK_SIGNAL_PROCESSOR='tastypie_search.signals.ModelCheckingQueuedSignalProcessor'
"""
def __init__(self, *args, **kwargs):
super(ModelCheckingQueuedSignalProcessor, self).__init__(*args, **kwargs)
self._indexed_models = tuple(indexed_models())
def enqueue_save(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('update', instance)
def enqueue_delete(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('delete', instance)
|
Add a signal processor that only records Models that have indexes.
|
Add a signal processor that only records Models that have indexes.
|
Python
|
mit
|
adroffner/tastypie-searchable
|
Add a signal processor that only records Models that have indexes.
|
""" Additional Haystack Signal Processors
This module provides additional HAYSTACK_SIGNAL_PROCESSOR classes to reindex more efficiently.
They may be tied to various other haystack-related libraries.
They are not tied to `tastypie_search` in any way! Use them anywhere Haystack is installed.
"""
import sys
from django.conf import settings
# TODO: Using "importlib" requires py2.7; find another way.
import inspect
import importlib
from haystack.indexes import SearchIndex
from queued_search.signals import QueuedSignalProcessor
import logging
logger = logging.getLogger(__name__)
def indexed_models():
""" Use introspection to find haystack's indexed models.
These models need to trigger `update_index` when the ORM changes them.
Each application in settings.INSTALLED_APPS must have the module:
import app.search_indexes
... and there must be classes derived from:
import haystack.indexes.SearchIndex
"""
models_list = []
for app in settings.INSTALLED_APPS:
try:
# find app.search_indexes modules in each app...
search_indexes = importlib.import_module('%s.search_indexes' % app)
search_indexes = [ m for n, m in inspect.getmembers(search_indexes, inspect.isclass) if issubclass(m, SearchIndex) ]
logger.debug("%s.search_indexes: %r" % (app, search_indexes))
models_list = [ m().get_model() for m in search_indexes ]
logger.debug("Indexed Models: %r" % models_list)
except ImportError as e:
pass
return models_list
class ModelCheckingQueuedSignalProcessor(QueuedSignalProcessor):
""" This signal processor tells Haystack to queue up only
the models that have full text indexes.
Declare this signal processor class in the django.settings as a full path.
HAYSTACK_SIGNAL_PROCESSOR='tastypie_search.signals.ModelCheckingQueuedSignalProcessor'
"""
def __init__(self, *args, **kwargs):
super(ModelCheckingQueuedSignalProcessor, self).__init__(*args, **kwargs)
self._indexed_models = tuple(indexed_models())
def enqueue_save(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('update', instance)
def enqueue_delete(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('delete', instance)
|
<commit_before><commit_msg>Add a signal processor that only records Models that have indexes.<commit_after>
|
""" Additional Haystack Signal Processors
This module provides additional HAYSTACK_SIGNAL_PROCESSOR classes to reindex more efficiently.
They may be tied to various other haystack-related libraries.
They are not tied to `tastypie_search` in any way! Use them anywhere Haystack is installed.
"""
import sys
from django.conf import settings
# TODO: Using "importlib" requires py2.7; find another way.
import inspect
import importlib
from haystack.indexes import SearchIndex
from queued_search.signals import QueuedSignalProcessor
import logging
logger = logging.getLogger(__name__)
def indexed_models():
""" Use introspection to find haystack's indexed models.
These models need to trigger `update_index` when the ORM changes them.
Each application in settings.INSTALLED_APPS must have the module:
import app.search_indexes
... and there must be classes derived from:
import haystack.indexes.SearchIndex
"""
models_list = []
for app in settings.INSTALLED_APPS:
try:
# find app.search_indexes modules in each app...
search_indexes = importlib.import_module('%s.search_indexes' % app)
search_indexes = [ m for n, m in inspect.getmembers(search_indexes, inspect.isclass) if issubclass(m, SearchIndex) ]
logger.debug("%s.search_indexes: %r" % (app, search_indexes))
models_list = [ m().get_model() for m in search_indexes ]
logger.debug("Indexed Models: %r" % models_list)
except ImportError as e:
pass
return models_list
class ModelCheckingQueuedSignalProcessor(QueuedSignalProcessor):
""" This signal processor tells Haystack to queue up only
the models that have full text indexes.
Declare this signal processor class in the django.settings as a full path.
HAYSTACK_SIGNAL_PROCESSOR='tastypie_search.signals.ModelCheckingQueuedSignalProcessor'
"""
def __init__(self, *args, **kwargs):
super(ModelCheckingQueuedSignalProcessor, self).__init__(*args, **kwargs)
self._indexed_models = tuple(indexed_models())
def enqueue_save(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('update', instance)
def enqueue_delete(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('delete', instance)
|
Add a signal processor that only records Models that have indexes.""" Additional Haystack Signal Processors
This module provides additional HAYSTACK_SIGNAL_PROCESSOR classes to reindex more efficiently.
They may be tied to various other haystack-related libraries.
They are not tied to `tastypie_search` in any way! Use them anywhere Haystack is installed.
"""
import sys
from django.conf import settings
# TODO: Using "importlib" requires py2.7; find another way.
import inspect
import importlib
from haystack.indexes import SearchIndex
from queued_search.signals import QueuedSignalProcessor
import logging
logger = logging.getLogger(__name__)
def indexed_models():
""" Use introspection to find haystack's indexed models.
These models need to trigger `update_index` when the ORM changes them.
Each application in settings.INSTALLED_APPS must have the module:
import app.search_indexes
... and there must be classes derived from:
import haystack.indexes.SearchIndex
"""
models_list = []
for app in settings.INSTALLED_APPS:
try:
# find app.search_indexes modules in each app...
search_indexes = importlib.import_module('%s.search_indexes' % app)
search_indexes = [ m for n, m in inspect.getmembers(search_indexes, inspect.isclass) if issubclass(m, SearchIndex) ]
logger.debug("%s.search_indexes: %r" % (app, search_indexes))
models_list = [ m().get_model() for m in search_indexes ]
logger.debug("Indexed Models: %r" % models_list)
except ImportError as e:
pass
return models_list
class ModelCheckingQueuedSignalProcessor(QueuedSignalProcessor):
""" This signal processor tells Haystack to queue up only
the models that have full text indexes.
Declare this signal processor class in the django.settings as a full path.
HAYSTACK_SIGNAL_PROCESSOR='tastypie_search.signals.ModelCheckingQueuedSignalProcessor'
"""
def __init__(self, *args, **kwargs):
super(ModelCheckingQueuedSignalProcessor, self).__init__(*args, **kwargs)
self._indexed_models = tuple(indexed_models())
def enqueue_save(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('update', instance)
def enqueue_delete(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('delete', instance)
|
<commit_before><commit_msg>Add a signal processor that only records Models that have indexes.<commit_after>""" Additional Haystack Signal Processors
This module provides additional HAYSTACK_SIGNAL_PROCESSOR classes to reindex more efficiently.
They may be tied to various other haystack-related libraries.
They are not tied to `tastypie_search` in any way! Use them anywhere Haystack is installed.
"""
import sys
from django.conf import settings
# TODO: Using "importlib" requires py2.7; find another way.
import inspect
import importlib
from haystack.indexes import SearchIndex
from queued_search.signals import QueuedSignalProcessor
import logging
logger = logging.getLogger(__name__)
def indexed_models():
""" Use introspection to find haystack's indexed models.
These models need to trigger `update_index` when the ORM changes them.
Each application in settings.INSTALLED_APPS must have the module:
import app.search_indexes
... and there must be classes derived from:
import haystack.indexes.SearchIndex
"""
models_list = []
for app in settings.INSTALLED_APPS:
try:
# find app.search_indexes modules in each app...
search_indexes = importlib.import_module('%s.search_indexes' % app)
search_indexes = [ m for n, m in inspect.getmembers(search_indexes, inspect.isclass) if issubclass(m, SearchIndex) ]
logger.debug("%s.search_indexes: %r" % (app, search_indexes))
models_list = [ m().get_model() for m in search_indexes ]
logger.debug("Indexed Models: %r" % models_list)
except ImportError as e:
pass
return models_list
class ModelCheckingQueuedSignalProcessor(QueuedSignalProcessor):
""" This signal processor tells Haystack to queue up only
the models that have full text indexes.
Declare this signal processor class in the django.settings as a full path.
HAYSTACK_SIGNAL_PROCESSOR='tastypie_search.signals.ModelCheckingQueuedSignalProcessor'
"""
def __init__(self, *args, **kwargs):
super(ModelCheckingQueuedSignalProcessor, self).__init__(*args, **kwargs)
self._indexed_models = tuple(indexed_models())
def enqueue_save(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('update', instance)
def enqueue_delete(self, sender, instance, **kwargs):
if isinstance(instance, self._indexed_models):
return self.enqueue('delete', instance)
|
|
7b6fe1d3dfe6c4397729e085773dda73d4791716
|
remove-notes.py
|
remove-notes.py
|
#!/usr/bin/env python3
from bs4 import BeautifulSoup
import sys
with open(sys.argv[1], "r") as f:
tree = BeautifulSoup(f, 'html.parser')
for note in tree.find_all("div", class_ = "notes"):
note.decompose()
print(tree)
|
Add script to remove notes
|
Add script to remove notes
The script can be used to remove the notes in a compiled presentation.
This is useful when publishing slides so the notes intended for the
presenter only are not published as well.
|
Python
|
mit
|
frederikmoellers/hovercraft-template
|
Add script to remove notes
The script can be used to remove the notes in a compiled presentation.
This is useful when publishing slides so the notes intended for the
presenter only are not published as well.
|
#!/usr/bin/env python3
from bs4 import BeautifulSoup
import sys
with open(sys.argv[1], "r") as f:
tree = BeautifulSoup(f, 'html.parser')
for note in tree.find_all("div", class_ = "notes"):
note.decompose()
print(tree)
|
<commit_before><commit_msg>Add script to remove notes
The script can be used to remove the notes in a compiled presentation.
This is useful when publishing slides so the notes intended for the
presenter only are not published as well.<commit_after>
|
#!/usr/bin/env python3
from bs4 import BeautifulSoup
import sys
with open(sys.argv[1], "r") as f:
tree = BeautifulSoup(f, 'html.parser')
for note in tree.find_all("div", class_ = "notes"):
note.decompose()
print(tree)
|
Add script to remove notes
The script can be used to remove the notes in a compiled presentation.
This is useful when publishing slides so the notes intended for the
presenter only are not published as well.#!/usr/bin/env python3
from bs4 import BeautifulSoup
import sys
with open(sys.argv[1], "r") as f:
tree = BeautifulSoup(f, 'html.parser')
for note in tree.find_all("div", class_ = "notes"):
note.decompose()
print(tree)
|
<commit_before><commit_msg>Add script to remove notes
The script can be used to remove the notes in a compiled presentation.
This is useful when publishing slides so the notes intended for the
presenter only are not published as well.<commit_after>#!/usr/bin/env python3
from bs4 import BeautifulSoup
import sys
with open(sys.argv[1], "r") as f:
tree = BeautifulSoup(f, 'html.parser')
for note in tree.find_all("div", class_ = "notes"):
note.decompose()
print(tree)
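A minimal sketch of the transformation the script performs, run on an inline HTML snippet rather than a real compiled presentation (the 'step' class is only illustrative):

from bs4 import BeautifulSoup

html = '<div class="step">slide</div><div class="notes">presenter only</div>'
tree = BeautifulSoup(html, 'html.parser')
for note in tree.find_all('div', class_='notes'):
    note.decompose()  # removes the tag and its contents from the tree
print(tree)  # -> <div class="step">slide</div>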
|
|
d7bce6c1e48ba1f5a0e50909e21ec2a8db86bca7
|
instance/epio_config.py
|
instance/epio_config.py
|
"""\
Epio Config
The recommended usage of this file is to rename it to <servername>_config.py.
This can be loaded by setting the SETTINGS_MODULE environment variable to here.
See 'default_config.py' for a complete list of overridable settings.
"""
# Security settings
SECRET_KEY = '<REMOVED>'
# Logging settings
ERROR_EMAIL_INFO = (('smtp.gmail.com', 587),
'"Epio Error Notification" <epio.errors@gmail.com>', ['espo58@gmail.com'],
'Tabhouse Error', ('epio.errors@gmail.com', '1UMbJXCNgCHvEoEiMiv9'))
# Analytics settings
ANALYTICS_SCRIPT = """\
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-26864275-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
"""
# User feedback settings
FEEDBACK_BLOCK = """\
<style>
#uvTab { top: 30% !important; }
@media only screen and (max-device-width: 480px) {
#uvTab { display: none !important; }
}
@media handheld {
#uvTab { display: none !important; }
}
</style>
<script>
var uvOptions = {};
(function() {
var uv = document.createElement('script'); uv.type = 'text/javascript'; uv.async = true;
uv.src = ('https:' == document.location.protocol ? 'https://' : 'http://') + 'widget.uservoice.com/eY2YtrTu2YUkWsocmUlmg.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(uv, s);
})();
</script>
"""
|
Add ep.io config for historical purposes.
|
Add ep.io config for historical purposes.
|
Python
|
mit
|
joeyespo/tabhouse,joeyespo/tabhouse.org,joeyespo/tabhouse.org,joeyespo/tabhouse
|
Add ep.io config for historical purposes.
|
"""\
Epio Config
The recommended usage of this file is to rename it to <servername>_config.py.
This can be loaded by setting the SETTINGS_MODULE environment variable to here.
See 'default_config.py' for a complete list of overridable settings.
"""
# Security settings
SECRET_KEY = '<REMOVED>'
# Logging settings
ERROR_EMAIL_INFO = (('smtp.gmail.com', 587),
'"Epio Error Notification" <epio.errors@gmail.com>', ['espo58@gmail.com'],
'Tabhouse Error', ('epio.errors@gmail.com', '1UMbJXCNgCHvEoEiMiv9'))
# Analytics settings
ANALYTICS_SCRIPT = """\
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-26864275-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
"""
# User feedback settings
FEEDBACK_BLOCK = """\
<style>
#uvTab { top: 30% !important; }
@media only screen and (max-device-width: 480px) {
#uvTab { display: none !important; }
}
@media handheld {
#uvTab { display: none !important; }
}
</style>
<script>
var uvOptions = {};
(function() {
var uv = document.createElement('script'); uv.type = 'text/javascript'; uv.async = true;
uv.src = ('https:' == document.location.protocol ? 'https://' : 'http://') + 'widget.uservoice.com/eY2YtrTu2YUkWsocmUlmg.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(uv, s);
})();
</script>
"""
|
<commit_before><commit_msg>Add ep.io config for historical purposes.<commit_after>
|
"""\
Epio Config
The recommended usage of this file is to rename it to <servername>_config.py.
This can be loaded by setting the SETTINGS_MODULE environment variable to here.
See 'default_config.py' for a complete list of overridable settings.
"""
# Security settings
SECRET_KEY = '<REMOVED>'
# Logging settings
ERROR_EMAIL_INFO = (('smtp.gmail.com', 587),
'"Epio Error Notification" <epio.errors@gmail.com>', ['espo58@gmail.com'],
'Tabhouse Error', ('epio.errors@gmail.com', '1UMbJXCNgCHvEoEiMiv9'))
# Analytics settings
ANALYTICS_SCRIPT = """\
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-26864275-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
"""
# User feedback settings
FEEDBACK_BLOCK = """\
<style>
#uvTab { top: 30% !important; }
@media only screen and (max-device-width: 480px) {
#uvTab { display: none !important; }
}
@media handheld {
#uvTab { display: none !important; }
}
</style>
<script>
var uvOptions = {};
(function() {
var uv = document.createElement('script'); uv.type = 'text/javascript'; uv.async = true;
uv.src = ('https:' == document.location.protocol ? 'https://' : 'http://') + 'widget.uservoice.com/eY2YtrTu2YUkWsocmUlmg.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(uv, s);
})();
</script>
"""
|
Add ep.io config for historical purposes."""\
Epio Config
The recommended usage of this file is to rename it to <servername>_config.py.
This can be loaded by setting the SETTINGS_MODULE environment variable to here.
See 'default_config.py' for a complete list of overridable settings.
"""
# Security settings
SECRET_KEY = '<REMOVED>'
# Logging settings
ERROR_EMAIL_INFO = (('smtp.gmail.com', 587),
'"Epio Error Notification" <epio.errors@gmail.com>', ['espo58@gmail.com'],
'Tabhouse Error', ('epio.errors@gmail.com', '1UMbJXCNgCHvEoEiMiv9'))
# Analytics settings
ANALYTICS_SCRIPT = """\
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-26864275-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
"""
# User feedback settings
FEEDBACK_BLOCK = """\
<style>
#uvTab { top: 30% !important; }
@media only screen and (max-device-width: 480px) {
#uvTab { display: none !important; }
}
@media handheld {
#uvTab { display: none !important; }
}
</style>
<script>
var uvOptions = {};
(function() {
var uv = document.createElement('script'); uv.type = 'text/javascript'; uv.async = true;
uv.src = ('https:' == document.location.protocol ? 'https://' : 'http://') + 'widget.uservoice.com/eY2YtrTu2YUkWsocmUlmg.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(uv, s);
})();
</script>
"""
|
<commit_before><commit_msg>Add ep.io config for historical purposes.<commit_after>"""\
Epio Config
The recommended usage of this file is to rename it to <servername>_config.py.
This can be loaded by setting the SETTINGS_MODULE environment variable to here.
See 'default_config.py' for a complete list of overridable settings.
"""
# Security settings
SECRET_KEY = '<REMOVED>'
# Logging settings
ERROR_EMAIL_INFO = (('smtp.gmail.com', 587),
'"Epio Error Notification" <epio.errors@gmail.com>', ['espo58@gmail.com'],
'Tabhouse Error', ('epio.errors@gmail.com', '1UMbJXCNgCHvEoEiMiv9'))
# Analytics settings
ANALYTICS_SCRIPT = """\
var _gaq = _gaq || [];
_gaq.push(['_setAccount', 'UA-26864275-1']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);
})();
"""
# User feedback settings
FEEDBACK_BLOCK = """\
<style>
#uvTab { top: 30% !important; }
@media only screen and (max-device-width: 480px) {
#uvTab { display: none !important; }
}
@media handheld {
#uvTab { display: none !important; }
}
</style>
<script>
var uvOptions = {};
(function() {
var uv = document.createElement('script'); uv.type = 'text/javascript'; uv.async = true;
uv.src = ('https:' == document.location.protocol ? 'https://' : 'http://') + 'widget.uservoice.com/eY2YtrTu2YUkWsocmUlmg.js';
var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(uv, s);
})();
</script>
"""
|
|
d38392998869319677cc884836c5952441f6ac62
|
pokemongo_bot/base_task.py
|
pokemongo_bot/base_task.py
|
import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
|
import logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
|
Support for log_delay for all tasks
|
Support for log_delay for all tasks
|
Python
|
mit
|
lythien/pokemongo,heihachi/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,lythien/pokemongo,goedzo/PokemonGo-Bot,goedzo/PokemonGo-Bot,dtee/PokemonGo-Bot,DBa2016/PokemonGo-Bot,DBa2016/PokemonGo-Bot,DBa2016/PokemonGo-Bot,halsafar/PokemonGo-Bot,goshan/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,cmezh/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,cmezh/PokemonGo-Bot,goedzo/PokemonGo-Bot,dtee/PokemonGo-Bot,lythien/pokemongo,pengzhangdev/PokemonGo-Bot,halsafar/PokemonGo-Bot,halsafar/PokemonGo-Bot,heihachi/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,dtee/PokemonGo-Bot,DBa2016/PokemonGo-Bot,cmezh/PokemonGo-Bot,dtee/PokemonGo-Bot,heihachi/PokemonGo-Bot,heihachi/PokemonGo-Bot,lythien/pokemongo,goedzo/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,goshan/PokemonGo-Bot,cmezh/PokemonGo-Bot,halsafar/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot
|
import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
Support for log_delay for all tasks
|
import logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
|
<commit_before>import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
<commit_msg>Support for log_delay for all tasks<commit_after>
|
import logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
|
import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
Support for log_delay for all tasksimport logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
|
<commit_before>import logging
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
<commit_msg>Support for log_delay for all tasks<commit_after>import logging
import time
class BaseTask(object):
TASK_API_VERSION = 1
def __init__(self, bot, config):
"""
:param bot:
:type bot: pokemongo_bot.PokemonGoBot
:param config:
:return:
"""
self.bot = bot
self.config = config
self._validate_work_exists()
self.logger = logging.getLogger(type(self).__name__)
self.enabled = config.get('enabled', True)
self.last_log_time = time.time()
self.initialize()
def _validate_work_exists(self):
method = getattr(self, 'work', None)
if not method or not callable(method):
raise NotImplementedError('Missing "work" method')
def emit_event(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
sender=self
# Print log only if X seconds are passed from last log
if (time.time() - self.last_log_time) > self.config.get('log_delay', 0):
self.last_log_time = time.time()
self.bot.event_manager.emit(
event,
sender=sender,
level=level,
formatted=formatted,
data=data
)
def initialize(self):
pass
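A minimal sketch of a task config exercising the new throttle; the only key this change actually reads is log_delay, via self.config.get('log_delay', 0):

config = {
    'enabled': True,
    'log_delay': 10,  # emit_event logs at most once every 10 seconds
}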
|
e574919647565d046b40f31de765e111ffee9268
|
tests/example_tests/test_custom_struct.py
|
tests/example_tests/test_custom_struct.py
|
import re
import unittest
from example_tests import example_test
class TestCustomStruct(unittest.TestCase):
def test_builtin_vectors(self):
output = example_test.run_example('custom_struct/builtin_vectors.py')
assert re.match(
r"Kernel output matches expected value.",
output.decode('utf-8')
)
def test_packed_matrix(self):
output = example_test.run_example('custom_struct/packed_matrix.py')
assert re.match(
r"Kernel output matches expected value for type 'float'.\n"
r"Kernel output matches expected value for type 'double'.",
output.decode('utf-8'),
)
def test_complex_struct(self):
output = example_test.run_example('custom_struct/complex_struct.py')
assert re.match(
r"Overall structure itemsize: \d+ bytes\n"
r"Structure members itemsize: \[(\s*\d+){5}]\n"
r"Structure members offsets: \[(\s*\d+){5}]\n"
r"Complex structure value:\n"
r"\s+\[.*\]\n"
r"Kernel output matches expected value.",
output.decode('utf-8'),
)
|
Add a test for custom struct examples.
|
Add a test for custom struct examples.
|
Python
|
mit
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
Add a test for custom struct examples.
|
import re
import unittest
from example_tests import example_test
class TestCustomStruct(unittest.TestCase):
def test_builtin_vectors(self):
output = example_test.run_example('custom_struct/builtin_vectors.py')
assert re.match(
r"Kernel output matches expected value.",
output.decode('utf-8')
)
def test_packed_matrix(self):
output = example_test.run_example('custom_struct/packed_matrix.py')
assert re.match(
r"Kernel output matches expected value for type 'float'.\n"
r"Kernel output matches expected value for type 'double'.",
output.decode('utf-8'),
)
def test_complex_struct(self):
output = example_test.run_example('custom_struct/complex_struct.py')
assert re.match(
r"Overall structure itemsize: \d+ bytes\n"
r"Structure members itemsize: \[(\s*\d+){5}]\n"
r"Structure members offsets: \[(\s*\d+){5}]\n"
r"Complex structure value:\n"
r"\s+\[.*\]\n"
r"Kernel output matches expected value.",
output.decode('utf-8'),
)
|
<commit_before><commit_msg>Add a test for custom struct examples.<commit_after>
|
import re
import unittest
from example_tests import example_test
class TestCustomStruct(unittest.TestCase):
def test_builtin_vectors(self):
output = example_test.run_example('custom_struct/builtin_vectors.py')
assert re.match(
r"Kernel output matches expected value.",
output.decode('utf-8')
)
def test_packed_matrix(self):
output = example_test.run_example('custom_struct/packed_matrix.py')
assert re.match(
r"Kernel output matches expected value for type 'float'.\n"
r"Kernel output matches expected value for type 'double'.",
output.decode('utf-8'),
)
def test_complex_struct(self):
output = example_test.run_example('custom_struct/complex_struct.py')
assert re.match(
r"Overall structure itemsize: \d+ bytes\n"
r"Structure members itemsize: \[(\s*\d+){5}]\n"
r"Structure members offsets: \[(\s*\d+){5}]\n"
r"Complex structure value:\n"
r"\s+\[.*\]\n"
r"Kernel output matches expected value.",
output.decode('utf-8'),
)
|
Add a test for custom struct examples.import re
import unittest
from example_tests import example_test
class TestCustomStruct(unittest.TestCase):
def test_builtin_vectors(self):
output = example_test.run_example('custom_struct/builtin_vectors.py')
assert re.match(
r"Kernel output matches expected value.",
output.decode('utf-8')
)
def test_packed_matrix(self):
output = example_test.run_example('custom_struct/packed_matrix.py')
assert re.match(
r"Kernel output matches expected value for type 'float'.\n"
r"Kernel output matches expected value for type 'double'.",
output.decode('utf-8'),
)
def test_complex_struct(self):
output = example_test.run_example('custom_struct/complex_struct.py')
assert re.match(
r"Overall structure itemsize: \d+ bytes\n"
r"Structure members itemsize: \[(\s*\d+){5}]\n"
r"Structure members offsets: \[(\s*\d+){5}]\n"
r"Complex structure value:\n"
r"\s+\[.*\]\n"
r"Kernel output matches expected value.",
output.decode('utf-8'),
)
|
<commit_before><commit_msg>Add a test for custom struct examples.<commit_after>import re
import unittest
from example_tests import example_test
class TestCustomStruct(unittest.TestCase):
def test_builtin_vectors(self):
output = example_test.run_example('custom_struct/builtin_vectors.py')
assert re.match(
r"Kernel output matches expected value.",
output.decode('utf-8')
)
def test_packed_matrix(self):
output = example_test.run_example('custom_struct/packed_matrix.py')
assert re.match(
r"Kernel output matches expected value for type 'float'.\n"
r"Kernel output matches expected value for type 'double'.",
output.decode('utf-8'),
)
def test_complex_struct(self):
output = example_test.run_example('custom_struct/complex_struct.py')
assert re.match(
r"Overall structure itemsize: \d+ bytes\n"
r"Structure members itemsize: \[(\s*\d+){5}]\n"
r"Structure members offsets: \[(\s*\d+){5}]\n"
r"Complex structure value:\n"
r"\s+\[.*\]\n"
r"Kernel output matches expected value.",
output.decode('utf-8'),
)
|
|
6881c6cdc8003f649fb9957850a789b66d92d707
|
pypi_alerts/__init__.py
|
pypi_alerts/__init__.py
|
# -*- coding: utf-8 -*-
"""
Parse requirements file, and work out whether there are any updates.
"""
import requests
from semantic_version import Version
def package_url(package):
"""Return fully-qualified URL to package on PyPI (JSON endpoint)."""
return u"http://pypi.python.org/pypi/%s/json" % package
def package_info(package_url):
"""Return latest package version from PyPI (as JSON)."""
return requests.get(package_url).json().get('info')
def package_version(package_info):
"""Return the latest version from package_version as semver Version."""
return Version(package_info.get('version'))
class PackageVersion(object):
"""A specific version of a package."""
def __init__(self, name, version_string, **kwargs):
self.name = name
self.version_string = version_string
self.uploaded_at = kwargs.pop('uploaded_at', None)
def __unicode__(self):
return u"Package: %s (%s)" % (self.name, self.version)
def __str__(self):
return unicode(self).encode('utf-8')
@property
def version(self):
"""Return a semantic_version.Version object."""
return Version(self.version_string, partial=True)
def diff(self, package_to_compare):
"""Return string representing the diff between package versions.
We're interested in whether this is a major, minor, patch or 'other'
update. This method will compare the two versions and return None if
they are the same, else it will return a string value indicating the
type of diff - 'major', 'minor', 'patch', 'other'.
"""
version1 = self.version
version2 = package_to_compare.version
if version1 == version2:
return None
for v in ('major', 'minor', 'patch'):
if getattr(version1, v) != getattr(version2, v):
return v
return 'other'
|
Add first pass at a PackageVersion class
|
Add first pass at a PackageVersion class
|
Python
|
mit
|
yunojuno/pypi-alerts
|
Add first pass at a PackageVersion class
|
# -*- coding: utf-8 -*-
"""
Parse requirements file, and work out whether there are any updates.
"""
import requests
from semantic_version import Version
def package_url(package):
"""Return fully-qualified URL to package on PyPI (JSON endpoint)."""
return u"http://pypi.python.org/pypi/%s/json" % package
def package_info(package_url):
"""Return latest package version from PyPI (as JSON)."""
return requests.get(package_url).json().get('info')
def package_version(package_info):
"""Return the latest version from package_version as semver Version."""
return Version(package_info.get('version'))
class PackageVersion(object):
"""A specific version of a package."""
def __init__(self, name, version_string, **kwargs):
self.name = name
self.version_string = version_string
self.uploaded_at = kwargs.pop('uploaded_at', None)
def __unicode__(self):
return u"Package: %s (%s)" % (self.name, self.version)
def __str__(self):
return unicode(self).encode('utf-8')
@property
def version(self):
"""Return a semantic_version.Version object."""
return Version(self.version_string, partial=True)
def diff(self, package_to_compare):
"""Return string representing the diff between package versions.
We're interested in whether this is a major, minor, patch or 'other'
update. This method will compare the two versions and return None if
they are the same, else it will return a string value indicating the
type of diff - 'major', 'minor', 'patch', 'other'.
"""
version1 = self.version
version2 = package_to_compare.version
if version1 == version2:
return None
for v in ('major', 'minor', 'patch'):
if getattr(version1, v) != getattr(version2, v):
return v
return 'other'
|
<commit_before><commit_msg>Add first pass at a PackageVersion class<commit_after>
|
# -*- coding: utf-8 -*-
"""
Parse requirements file, and work out whether there are any updates.
"""
import requests
from semantic_version import Version
def package_url(package):
"""Return fully-qualified URL to package on PyPI (JSON endpoint)."""
return u"http://pypi.python.org/pypi/%s/json" % package
def package_info(package_url):
"""Return latest package version from PyPI (as JSON)."""
return requests.get(package_url).json().get('info')
def package_version(package_info):
"""Return the latest version from package_version as semver Version."""
return Version(package_info.get('version'))
class PackageVersion(object):
"""A specific version of a package."""
def __init__(self, name, version_string, **kwargs):
self.name = name
self.version_string = version_string
self.uploaded_at = kwargs.pop('uploaded_at', None)
def __unicode__(self):
return u"Package: %s (%s)" % (self.name, self.version)
def __str__(self):
return unicode(self).encode('utf-8')
@property
def version(self):
"""Return a semantic_version.Version object."""
return Version(self.version_string, partial=True)
def diff(self, package_to_compare):
"""Return string representing the diff between package versions.
We're interested in whether this is a major, minor, patch or 'other'
update. This method will compare the two versions and return None if
they are the same, else it will return a string value indicating the
type of diff - 'major', 'minor', 'patch', 'other'.
"""
version1 = self.version
version2 = package_to_compare.version
if version1 == version2:
return None
for v in ('major', 'minor', 'patch'):
if getattr(version1, v) != getattr(version2, v):
return v
return 'other'
|
Add first pass at a PackageVersion class# -*- coding: utf-8 -*-
"""
Parse requirements file, and work out whether there are any updates.
"""
import requests
from semantic_version import Version
def package_url(package):
"""Return fully-qualified URL to package on PyPI (JSON endpoint)."""
return u"http://pypi.python.org/pypi/%s/json" % package
def package_info(package_url):
"""Return latest package version from PyPI (as JSON)."""
return requests.get(package_url).json().get('info')
def package_version(package_info):
"""Return the latest version from package_version as semver Version."""
return Version(package_info.get('version'))
class PackageVersion(object):
"""A specific version of a package."""
def __init__(self, name, version_string, **kwargs):
self.name = name
self.version_string = version_string
self.uploaded_at = kwargs.pop('uploaded_at', None)
def __unicode__(self):
return u"Package: %s (%s)" % (self.name, self.version)
def __str__(self):
return unicode(self).encode('utf-8')
@property
def version(self):
"""Return a semantic_version.Version object."""
return Version(self.version_string, partial=True)
def diff(self, package_to_compare):
"""Return string representing the diff between package versions.
We're interested in whether this is a major, minor, patch or 'other'
update. This method will compare the two versions and return None if
they are the same, else it will return a string value indicating the
type of diff - 'major', 'minor', 'patch', 'other'.
"""
version1 = self.version
version2 = package_to_compare.version
if version1 == version2:
return None
for v in ('major', 'minor', 'patch'):
if getattr(version1, v) != getattr(version2, v):
return v
return 'other'
|
<commit_before><commit_msg>Add first pass at a PackageVersion class<commit_after># -*- coding: utf-8 -*-
"""
Parse requirements file, and work out whether there are any updates.
"""
import requests
from semantic_version import Version
def package_url(package):
"""Return fully-qualified URL to package on PyPI (JSON endpoint)."""
return u"http://pypi.python.org/pypi/%s/json" % package
def package_info(package_url):
"""Return latest package version from PyPI (as JSON)."""
return requests.get(package_url).json().get('info')
def package_version(package_info):
"""Return the latest version from package_version as semver Version."""
return Version(package_info.get('version'))
class PackageVersion(object):
"""A specific version of a package."""
def __init__(self, name, version_string, **kwargs):
self.name = name
self.version_string = version_string
self.uploaded_at = kwargs.pop('uploaded_at', None)
def __unicode__(self):
return u"Package: %s (%s)" % (self.name, self.version)
def __str__(self):
return unicode(self).encode('utf-8')
@property
def version(self):
"""Return a semantic_version.Version object."""
return Version(self.version_string, partial=True)
def diff(self, package_to_compare):
"""Return string representing the diff between package versions.
We're interested in whether this is a major, minor, patch or 'other'
update. This method will compare the two versions and return None if
they are the same, else it will return a string value indicating the
type of diff - 'major', 'minor', 'patch', 'other'.
"""
version1 = self.version
version2 = package_to_compare.version
if version1 == version2:
return None
for v in ('major', 'minor', 'patch'):
if getattr(version1, v) != getattr(version2, v):
return v
return 'other'
|
|
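A minimal usage sketch of the PackageVersion.diff() logic above; the import path `package_version` is an assumption, since the record does not name the module:

from package_version import PackageVersion  # hypothetical module name

current = PackageVersion("requests", "2.0.1")
latest = PackageVersion("requests", "2.1.0")
print(current.diff(latest))   # -> 'minor': majors match (2), minors differ (0 vs 1)
print(current.diff(current))  # -> None: identical versions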
05fb9d24c01c40312ba1f0930e7754ae84419bc9
|
scripts/pin_deps.py
|
scripts/pin_deps.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess as sp
from tempfile import mkstemp
def skip(g, *ks):
for k, x in enumerate(g):
if k not in ks:
yield x
if __name__ == '__main__':
tmp_req_f, tmp_req_path = mkstemp()
os.close(tmp_req_f)
sp.check_call(['pip-compile', '--output-file', 'dev-requirements.txt', 'dev-requirements.in'])
sp.check_call(['pip-compile', '--output-file', tmp_req_path, 'requirements.in'])
with open(tmp_req_path) as tmp_req_f:
with open('requirements.txt', 'w') as req_f:
for line in skip(tmp_req_f, 6):
print(line.rstrip(), file=req_f)
os.remove(tmp_req_path)
sp.check_call(['pip-sync', 'dev-requirements.txt', 'requirements.txt'])
|
Add a script to pin deps
|
Add a script to pin deps
|
Python
|
mit
|
idmit/midware
|
Add a script to pin deps
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess as sp
from tempfile import mkstemp
def skip(g, *ks):
for k, x in enumerate(g):
if k not in ks:
yield x
if __name__ == '__main__':
tmp_req_f, tmp_req_path = mkstemp()
os.close(tmp_req_f)
sp.check_call(['pip-compile', '--output-file', 'dev-requirements.txt', 'dev-requirements.in'])
sp.check_call(['pip-compile', '--output-file', tmp_req_path, 'requirements.in'])
with open(tmp_req_path) as tmp_req_f:
with open('requirements.txt', 'w') as req_f:
for line in skip(tmp_req_f, 6):
print(line.rstrip(), file=req_f)
os.remove(tmp_req_path)
sp.check_call(['pip-sync', 'dev-requirements.txt', 'requirements.txt'])
|
<commit_before><commit_msg>Add a script to pin deps<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess as sp
from tempfile import mkstemp
def skip(g, *ks):
for k, x in enumerate(g):
if k not in ks:
yield x
if __name__ == '__main__':
tmp_req_f, tmp_req_path = mkstemp()
os.close(tmp_req_f)
sp.check_call(['pip-compile', '--output-file', 'dev-requirements.txt', 'dev-requirements.in'])
sp.check_call(['pip-compile', '--output-file', tmp_req_path, 'requirements.in'])
with open(tmp_req_path) as tmp_req_f:
with open('requirements.txt', 'w') as req_f:
for line in skip(tmp_req_f, 6):
print(line.rstrip(), file=req_f)
os.remove(tmp_req_path)
sp.check_call(['pip-sync', 'dev-requirements.txt', 'requirements.txt'])
|
Add a script to pin deps#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess as sp
from tempfile import mkstemp
def skip(g, *ks):
for k, x in enumerate(g):
if k not in ks:
yield x
if __name__ == '__main__':
tmp_req_f, tmp_req_path = mkstemp()
os.close(tmp_req_f)
sp.check_call(['pip-compile', '--output-file', 'dev-requirements.txt', 'dev-requirements.in'])
sp.check_call(['pip-compile', '--output-file', tmp_req_path, 'requirements.in'])
with open(tmp_req_path) as tmp_req_f:
with open('requirements.txt', 'w') as req_f:
for line in skip(tmp_req_f, 6):
print(line.rstrip(), file=req_f)
os.remove(tmp_req_path)
sp.check_call(['pip-sync', 'dev-requirements.txt', 'requirements.txt'])
|
<commit_before><commit_msg>Add a script to pin deps<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import subprocess as sp
from tempfile import mkstemp
def skip(g, *ks):
for k, x in enumerate(g):
if k not in ks:
yield x
if __name__ == '__main__':
tmp_req_f, tmp_req_path = mkstemp()
os.close(tmp_req_f)
sp.check_call(['pip-compile', '--output-file', 'dev-requirements.txt', 'dev-requirements.in'])
sp.check_call(['pip-compile', '--output-file', tmp_req_path, 'requirements.in'])
with open(tmp_req_path) as tmp_req_f:
with open('requirements.txt', 'w') as req_f:
for line in skip(tmp_req_f, 6):
print(line.rstrip(), file=req_f)
os.remove(tmp_req_path)
sp.check_call(['pip-sync', 'dev-requirements.txt', 'requirements.txt'])
|
|
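As a side note, the skip() helper above drops generator items by zero-based index — here, the header lines pip-compile writes at the top of its output. A quick illustration of the helper on its own (pure Python, no assumptions beyond the function as defined):

def skip(g, *ks):
    for k, x in enumerate(g):
        if k not in ks:
            yield x

# drop the items at indices 0 and 2
print(list(skip("abcde", 0, 2)))  # -> ['b', 'd', 'e']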
6f0ab721e50cf432abb1fa0c3f315ee7b36276f5
|
future/fciqmcscf/test/test_he2_state_averaged.py
|
future/fciqmcscf/test/test_he2_state_averaged.py
|
#!/usr/bin/env python
import unittest
import numpy
from pyscf import gto
from pyscf import scf
from pyscf import mcscf
from pyscf import fciqmcscf
b = 1.4
mol = gto.Mole()
mol.build(
verbose = 0,
output = None,
atom = [['He',( 0.000000, 0.000000, -b/2)],
['He',( 0.000000, 0.000000, b)]],
basis = {'He': 'cc-pvdz'},
symmetry = 'C2v'
)
m = scf.RHF(mol)
m.conv_tol = 1e-9
m.scf()
class KnowValues(unittest.TestCase):
def test_mc2step_7o4e_fciqmc_4states(self):
mc = mcscf.CASSCF(m, 7, 4)
mc.max_cycle_macro = 10
mc.fcisolver = fciqmcscf.FCIQMCCI(mol)
mc.fcisolver.RDMSamples = 2000
mc.fcisolver.maxwalkers = 3000
mc.fcisolver.state_weights = [0.25] * 4
emc = mc.mc2step()[0]
print('Final energy:', emc)
if __name__ == "__main__":
unittest.main()
|
Add state-averaged test (benchmarks not yet added), for testing.
|
Add state-averaged test (benchmarks not yet added), for testing.
|
Python
|
apache-2.0
|
sunqm/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf
|
Add state-averaged test (benchmarks not yet added), for testing.
|
#!/usr/bin/env python
import unittest
import numpy
from pyscf import gto
from pyscf import scf
from pyscf import mcscf
from pyscf import fciqmcscf
b = 1.4
mol = gto.Mole()
mol.build(
verbose = 0,
output = None,
atom = [['He',( 0.000000, 0.000000, -b/2)],
['He',( 0.000000, 0.000000, b)]],
basis = {'He': 'cc-pvdz'},
symmetry = 'C2v'
)
m = scf.RHF(mol)
m.conv_tol = 1e-9
m.scf()
class KnowValues(unittest.TestCase):
def test_mc2step_7o4e_fciqmc_4states(self):
mc = mcscf.CASSCF(m, 7, 4)
mc.max_cycle_macro = 10
mc.fcisolver = fciqmcscf.FCIQMCCI(mol)
mc.fcisolver.RDMSamples = 2000
mc.fcisolver.maxwalkers = 3000
mc.fcisolver.state_weights = [0.25] * 4
emc = mc.mc2step()[0]
print('Final energy:', emc)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add state-averaged test (benchmarks not yet added), for testing.<commit_after>
|
#!/usr/bin/env python
import unittest
import numpy
from pyscf import gto
from pyscf import scf
from pyscf import mcscf
from pyscf import fciqmcscf
b = 1.4
mol = gto.Mole()
mol.build(
verbose = 0,
output = None,
atom = [['He',( 0.000000, 0.000000, -b/2)],
['He',( 0.000000, 0.000000, b)]],
basis = {'He': 'cc-pvdz'},
symmetry = 'C2v'
)
m = scf.RHF(mol)
m.conv_tol = 1e-9
m.scf()
class KnowValues(unittest.TestCase):
def test_mc2step_7o4e_fciqmc_4states(self):
mc = mcscf.CASSCF(m, 7, 4)
mc.max_cycle_macro = 10
mc.fcisolver = fciqmcscf.FCIQMCCI(mol)
mc.fcisolver.RDMSamples = 2000
mc.fcisolver.maxwalkers = 3000
mc.fcisolver.state_weights = [0.25] * 4
emc = mc.mc2step()[0]
print('Final energy:', emc)
if __name__ == "__main__":
unittest.main()
|
Add state-averaged test (benchmarks not yet added), for testing.#!/usr/bin/env python
import unittest
import numpy
from pyscf import gto
from pyscf import scf
from pyscf import mcscf
from pyscf import fciqmcscf
b = 1.4
mol = gto.Mole()
mol.build(
verbose = 0,
output = None,
atom = [['He',( 0.000000, 0.000000, -b/2)],
['He',( 0.000000, 0.000000, b)]],
basis = {'He': 'cc-pvdz'},
symmetry = 'C2v'
)
m = scf.RHF(mol)
m.conv_tol = 1e-9
m.scf()
class KnowValues(unittest.TestCase):
def test_mc2step_7o4e_fciqmc_4states(self):
mc = mcscf.CASSCF(m, 7, 4)
mc.max_cycle_macro = 10
mc.fcisolver = fciqmcscf.FCIQMCCI(mol)
mc.fcisolver.RDMSamples = 2000
mc.fcisolver.maxwalkers = 3000
mc.fcisolver.state_weights = [0.25] * 4
emc = mc.mc2step()[0]
print('Final energy:', emc)
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Add state-averaged test (benchmarks not yet added), for testing.<commit_after>#!/usr/bin/env python
import unittest
import numpy
from pyscf import gto
from pyscf import scf
from pyscf import mcscf
from pyscf import fciqmcscf
b = 1.4
mol = gto.Mole()
mol.build(
verbose = 0,
output = None,
atom = [['He',( 0.000000, 0.000000, -b/2)],
['He',( 0.000000, 0.000000, b)]],
basis = {'He': 'cc-pvdz'},
symmetry = 'C2v'
)
m = scf.RHF(mol)
m.conv_tol = 1e-9
m.scf()
class KnowValues(unittest.TestCase):
def test_mc2step_7o4e_fciqmc_4states(self):
mc = mcscf.CASSCF(m, 7, 4)
mc.max_cycle_macro = 10
mc.fcisolver = fciqmcscf.FCIQMCCI(mol)
mc.fcisolver.RDMSamples = 2000
mc.fcisolver.maxwalkers = 3000
mc.fcisolver.state_weights = [0.25] * 4
emc = mc.mc2step()[0]
print('Final energy:', emc)
if __name__ == "__main__":
unittest.main()
|
|
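For context on the state_weights line above: state-averaged CASSCF optimizes one set of orbitals for several CI roots at once, with the weights summing to one. A tiny sketch of how an equal-weight list is built (plain Python, independent of PySCF):

n_states = 4
weights = [1.0 / n_states] * n_states
assert abs(sum(weights) - 1.0) < 1e-12  # state weights must sum to one
print(weights)  # -> [0.25, 0.25, 0.25, 0.25]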
28da05d860147b5e0df37d998f437af6a5d4d178
|
airflow/hooks/postgres_hook.py
|
airflow/hooks/postgres_hook.py
|
import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = True
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
return psycopg2.connect(**conn_args)
|
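As the docstring above notes, SSL options travel in the connection's extra field as JSON, and get_conn() forwards only the whitelisted ssl* keys. A sketch of such a field (the file path is a placeholder):

import json

# Only keys in the whitelist above reach psycopg2.connect().
extra = json.dumps({"sslmode": "verify-full", "sslrootcert": "/path/to/root.crt"})
print(extra)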
import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = False
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
psycopg2_conn = psycopg2.connect(**conn_args)
if psycopg2_conn.server_version < 70400:
self.supports_autocommit = True
return psycopg2_conn
|
Set Postgres autocommit as supported only if server version is < 7.4
|
Set Postgres autocommit as supported only if server version is < 7.4
The server-side autocommit setting was removed here http://www.postgresql.org/docs/7.4/static/release-7-4.html
Resolves: #690
|
Python
|
apache-2.0
|
vijaysbhat/incubator-airflow,dgies/incubator-airflow,owlabs/incubator-airflow,Acehaidrey/incubator-airflow,gtoonstra/airflow,hamedhsn/incubator-airflow,apache/airflow,N3da/incubator-airflow,mrkm4ntr/incubator-airflow,RealImpactAnalytics/airflow,r39132/airflow,janczak10/incubator-airflow,cfei18/incubator-airflow,CloverHealth/airflow,jhsenjaliya/incubator-airflow,Acehaidrey/incubator-airflow,DEVELByte/incubator-airflow,andrewmchen/incubator-airflow,cfei18/incubator-airflow,wileeam/airflow,mtdewulf/incubator-airflow,DEVELByte/incubator-airflow,nathanielvarona/airflow,MortalViews/incubator-airflow,nathanielvarona/airflow,forevernull/incubator-airflow,zodiac/incubator-airflow,nathanielvarona/airflow,gtoonstra/airflow,sid88in/incubator-airflow,dud225/incubator-airflow,danielvdende/incubator-airflow,jwi078/incubator-airflow,aminghadersohi/airflow,ty707/airflow,preete-dixit-ck/incubator-airflow,mrkm4ntr/incubator-airflow,kerzhner/airflow,caseyching/incubator-airflow,mattuuh7/incubator-airflow,dmitry-r/incubator-airflow,r39132/airflow,biln/airflow,AllisonWang/incubator-airflow,andrewmchen/incubator-airflow,aminghadersohi/airflow,holygits/incubator-airflow,adrpar/incubator-airflow,yati-sagade/incubator-airflow,adrpar/incubator-airflow,brandsoulmates/incubator-airflow,jfantom/incubator-airflow,skudriashev/incubator-airflow,adamhaney/airflow,sid88in/incubator-airflow,juvoinc/airflow,asnir/airflow,akosel/incubator-airflow,jgao54/airflow,adamhaney/airflow,malmiron/incubator-airflow,cfei18/incubator-airflow,wolfier/incubator-airflow,ledsusop/airflow,mattuuh7/incubator-airflow,dhuang/incubator-airflow,jbhsieh/incubator-airflow,jhsenjaliya/incubator-airflow,gilt/incubator-airflow,cfei18/incubator-airflow,mrares/incubator-airflow,Fokko/incubator-airflow,adamhaney/airflow,biln/airflow,kerzhner/airflow,yiqingj/airflow,spektom/incubator-airflow,subodhchhabra/airflow,zoyahav/incubator-airflow,plypaul/airflow,dud225/incubator-airflow,jgao54/airflow,jbhsieh/incubator-airflow,ledsusop/airflow,holygits/incubator-airflow,vineet-rh/incubator-airflow,cjqian/incubator-airflow,mrkm4ntr/incubator-airflow,stverhae/incubator-airflow,mistercrunch/airflow,caseyching/incubator-airflow,wndhydrnt/airflow,janczak10/incubator-airflow,mtdewulf/incubator-airflow,RealImpactAnalytics/airflow,wooga/airflow,stverhae/incubator-airflow,cfei18/incubator-airflow,cademarkegard/airflow,wolfier/incubator-airflow,fenglu-g/incubator-airflow,ronfung/incubator-airflow,holygits/incubator-airflow,AllisonWang/incubator-airflow,wxiang7/airflow,juvoinc/airflow,jwi078/incubator-airflow,mylons/incubator-airflow,mylons/incubator-airflow,wooga/airflow,easytaxibr/airflow,dgies/incubator-airflow,AllisonWang/incubator-airflow,Tagar/incubator-airflow,vijaysbhat/incubator-airflow,asnir/airflow,ledsusop/airflow,asnir/airflow,aminghadersohi/airflow,sdiazb/airflow,jesusfcr/airflow,NielsZeilemaker/incubator-airflow,malmiron/incubator-airflow,wileeam/airflow,kerzhner/airflow,jfantom/incubator-airflow,NielsZeilemaker/incubator-airflow,bolkedebruin/airflow,RealImpactAnalytics/airflow,mtdewulf/incubator-airflow,criccomini/airflow,Tagar/incubator-airflow,mrares/incubator-airflow,jbhsieh/incubator-airflow,janczak10/incubator-airflow,akosel/incubator-airflow,airbnb/airflow,lxneng/incubator-airflow,cjqian/incubator-airflow,janczak10/incubator-airflow,hgrif/incubator-airflow,brandsoulmates/incubator-airflow,jfantom/incubator-airflow,MetrodataTeam/incubator-airflow,modsy/incubator-airflow,stverhae/incubator-airflow,stverhae/incubator-airflow,sekikn/incubator-airflow,KL-WLCR/
incubator-airflow,Acehaidrey/incubator-airflow,biln/airflow,dud225/incubator-airflow,danielvdende/incubator-airflow,cjqian/incubator-airflow,jbhsieh/incubator-airflow,cademarkegard/airflow,mtagle/airflow,fenglu-g/incubator-airflow,akosel/incubator-airflow,saguziel/incubator-airflow,MortalViews/incubator-airflow,subodhchhabra/airflow,gritlogic/incubator-airflow,wxiang7/airflow,mattuuh7/incubator-airflow,spektom/incubator-airflow,jesusfcr/airflow,sergiohgz/incubator-airflow,caseyching/incubator-airflow,mylons/incubator-airflow,NielsZeilemaker/incubator-airflow,sdiazb/airflow,artwr/airflow,alexvanboxel/airflow,Twistbioscience/incubator-airflow,CloverHealth/airflow,yk5/incubator-airflow,hgrif/incubator-airflow,wolfier/incubator-airflow,preete-dixit-ck/incubator-airflow,d-lee/airflow,rishibarve/incubator-airflow,Acehaidrey/incubator-airflow,yiqingj/airflow,cjqian/incubator-airflow,gilt/incubator-airflow,adrpar/incubator-airflow,yiqingj/airflow,danielvdende/incubator-airflow,sergiohgz/incubator-airflow,sekikn/incubator-airflow,gtoonstra/airflow,jwi078/incubator-airflow,zoyahav/incubator-airflow,adrpar/incubator-airflow,preete-dixit-ck/incubator-airflow,sid88in/incubator-airflow,apache/airflow,Acehaidrey/incubator-airflow,owlabs/incubator-airflow,zodiac/incubator-airflow,danielvdende/incubator-airflow,mattuuh7/incubator-airflow,skudriashev/incubator-airflow,preete-dixit-ck/incubator-airflow,mtdewulf/incubator-airflow,mistercrunch/airflow,mistercrunch/airflow,Fokko/incubator-airflow,sdiazb/airflow,danielvdende/incubator-airflow,OpringaoDoTurno/airflow,kerzhner/airflow,skudriashev/incubator-airflow,fenglu-g/incubator-airflow,jgao54/airflow,wileeam/airflow,DEVELByte/incubator-airflow,lyft/incubator-airflow,malmiron/incubator-airflow,malmiron/incubator-airflow,MortalViews/incubator-airflow,zack3241/incubator-airflow,dmitry-r/incubator-airflow,dgies/incubator-airflow,OpringaoDoTurno/airflow,edgarRd/incubator-airflow,juvoinc/airflow,yiqingj/airflow,ProstoMaxim/incubator-airflow,lyft/incubator-airflow,wileeam/airflow,hgrif/incubator-airflow,jlowin/airflow,apache/airflow,easytaxibr/airflow,mistercrunch/airflow,gritlogic/incubator-airflow,dgies/incubator-airflow,dhuang/incubator-airflow,DinoCow/airflow,zoyahav/incubator-airflow,d-lee/airflow,owlabs/incubator-airflow,ty707/airflow,btallman/incubator-airflow,jiwang576/incubator-airflow,AllisonWang/incubator-airflow,sekikn/incubator-airflow,easytaxibr/airflow,hamedhsn/incubator-airflow,jesusfcr/airflow,saguziel/incubator-airflow,ledsusop/airflow,wndhydrnt/airflow,btallman/incubator-airflow,ProstoMaxim/incubator-airflow,ProstoMaxim/incubator-airflow,andyxhadji/incubator-airflow,btallman/incubator-airflow,ronfung/incubator-airflow,nathanielvarona/airflow,andyxhadji/incubator-airflow,edgarRd/incubator-airflow,plypaul/airflow,airbnb/airflow,jesusfcr/airflow,wooga/airflow,apache/airflow,wooga/airflow,yk5/incubator-airflow,apache/airflow,subodhchhabra/airflow,mtagle/airflow,DinoCow/airflow,Acehaidrey/incubator-airflow,lxneng/incubator-airflow,gritlogic/incubator-airflow,alexvanboxel/airflow,zack3241/incubator-airflow,apache/incubator-airflow,jgao54/airflow,mrkm4ntr/incubator-airflow,gilt/incubator-airflow,brandsoulmates/incubator-airflow,criccomini/airflow,mtagle/airflow,modsy/incubator-airflow,artwr/airflow,mylons/incubator-airflow,yk5/incubator-airflow,apache/incubator-airflow,forevernull/incubator-airflow,cademarkegard/airflow,MortalViews/incubator-airflow,cademarkegard/airflow,ronfung/incubator-airflow,forevernull/incubator-airflow,ty707/airflow,KL-WLCR/incubat
or-airflow,lyft/incubator-airflow,cfei18/incubator-airflow,DinoCow/airflow,spektom/incubator-airflow,danielvdende/incubator-airflow,yati-sagade/incubator-airflow,Twistbioscience/incubator-airflow,mrares/incubator-airflow,jhsenjaliya/incubator-airflow,btallman/incubator-airflow,zoyahav/incubator-airflow,CloverHealth/airflow,biln/airflow,r39132/airflow,plypaul/airflow,hgrif/incubator-airflow,MetrodataTeam/incubator-airflow,hamedhsn/incubator-airflow,sekikn/incubator-airflow,Twistbioscience/incubator-airflow,dmitry-r/incubator-airflow,vineet-rh/incubator-airflow,yk5/incubator-airflow,wndhydrnt/airflow,mrares/incubator-airflow,dmitry-r/incubator-airflow,Fokko/incubator-airflow,ProstoMaxim/incubator-airflow,jfantom/incubator-airflow,apache/incubator-airflow,sergiohgz/incubator-airflow,gtoonstra/airflow,modsy/incubator-airflow,airbnb/airflow,edgarRd/incubator-airflow,zack3241/incubator-airflow,vineet-rh/incubator-airflow,yati-sagade/incubator-airflow,airbnb/airflow,jiwang576/incubator-airflow,jlowin/airflow,aminghadersohi/airflow,wolfier/incubator-airflow,d-lee/airflow,MetrodataTeam/incubator-airflow,criccomini/airflow,zack3241/incubator-airflow,asnir/airflow,apache/incubator-airflow,wxiang7/airflow,rishibarve/incubator-airflow,alexvanboxel/airflow,OpringaoDoTurno/airflow,juvoinc/airflow,bolkedebruin/airflow,jiwang576/incubator-airflow,Fokko/incubator-airflow,artwr/airflow,CloverHealth/airflow,lxneng/incubator-airflow,plypaul/airflow,mtagle/airflow,wndhydrnt/airflow,sergiohgz/incubator-airflow,N3da/incubator-airflow,MetrodataTeam/incubator-airflow,saguziel/incubator-airflow,jlowin/airflow,fenglu-g/incubator-airflow,Tagar/incubator-airflow,spektom/incubator-airflow,holygits/incubator-airflow,vijaysbhat/incubator-airflow,skudriashev/incubator-airflow,hamedhsn/incubator-airflow,DinoCow/airflow,artwr/airflow,rishibarve/incubator-airflow,wxiang7/airflow,nathanielvarona/airflow,jiwang576/incubator-airflow,sdiazb/airflow,lyft/incubator-airflow,ronfung/incubator-airflow,r39132/airflow,KL-WLCR/incubator-airflow,criccomini/airflow,N3da/incubator-airflow,jwi078/incubator-airflow,caseyching/incubator-airflow,saguziel/incubator-airflow,modsy/incubator-airflow,zodiac/incubator-airflow,RealImpactAnalytics/airflow,Tagar/incubator-airflow,jlowin/airflow,nathanielvarona/airflow,apache/airflow,bolkedebruin/airflow,brandsoulmates/incubator-airflow,rishibarve/incubator-airflow,subodhchhabra/airflow,NielsZeilemaker/incubator-airflow,easytaxibr/airflow,Twistbioscience/incubator-airflow,bolkedebruin/airflow,forevernull/incubator-airflow,adamhaney/airflow,vineet-rh/incubator-airflow,ty707/airflow,yati-sagade/incubator-airflow,andyxhadji/incubator-airflow,andrewmchen/incubator-airflow,akosel/incubator-airflow,andrewmchen/incubator-airflow,dhuang/incubator-airflow,N3da/incubator-airflow,owlabs/incubator-airflow,andyxhadji/incubator-airflow,d-lee/airflow,DEVELByte/incubator-airflow,dud225/incubator-airflow,dhuang/incubator-airflow,sid88in/incubator-airflow,vijaysbhat/incubator-airflow,alexvanboxel/airflow,gritlogic/incubator-airflow,gilt/incubator-airflow,lxneng/incubator-airflow,edgarRd/incubator-airflow,KL-WLCR/incubator-airflow,jhsenjaliya/incubator-airflow,zodiac/incubator-airflow,bolkedebruin/airflow,OpringaoDoTurno/airflow
|
import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = True
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
return psycopg2.connect(**conn_args)
Set Postgres autocommit as supported only if server version is < 7.4
The server-side autocommit setting was removed here http://www.postgresql.org/docs/7.4/static/release-7-4.html
Resolves: #690
|
import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = False
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
psycopg2_conn = psycopg2.connect(**conn_args)
if psycopg2_conn.server_version < 70400:
self.supports_autocommit = True
return psycopg2_conn
|
<commit_before>import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = True
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
return psycopg2.connect(**conn_args)
<commit_msg>Set Postgres autocommit as supported only if server version is < 7.4
The server-side autocommit setting was removed here http://www.postgresql.org/docs/7.4/static/release-7-4.html
Resolves: #690<commit_after>
|
import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = False
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
psycopg2_conn = psycopg2.connect(**conn_args)
if psycopg2_conn.server_version < 70400:
self.supports_autocommit = True
return psycopg2_conn
|
import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = True
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
return psycopg2.connect(**conn_args)
Set Postgres autocommit as supported only if server version is < 7.4
The server-side autocommit setting was removed here http://www.postgresql.org/docs/7.4/static/release-7-4.html
Resolves: #690import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = False
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
psycopg2_conn = psycopg2.connect(**conn_args)
if psycopg2_conn.server_version < 70400:
self.supports_autocommit = True
return psycopg2_conn
|
<commit_before>import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = True
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
return psycopg2.connect(**conn_args)
<commit_msg>Set Postgres autocommit as supported only if server version is < 7.4
The server-side autocommit setting was removed here http://www.postgresql.org/docs/7.4/static/release-7-4.html
Resolves: #690<commit_after>import psycopg2
from airflow.hooks.dbapi_hook import DbApiHook
class PostgresHook(DbApiHook):
'''
Interact with Postgres.
You can specify ssl parameters in the extra field of your connection
as ``{"sslmode": "require", "sslcert": "/path/to/cert.pem", etc}``.
'''
conn_name_attr = 'postgres_conn_id'
default_conn_name = 'postgres_default'
supports_autocommit = False
def get_conn(self):
conn = self.get_connection(self.postgres_conn_id)
conn_args = dict(
host=conn.host,
user=conn.login,
password=conn.password,
dbname=conn.schema,
port=conn.port)
# check for ssl parameters in conn.extra
for arg_name, arg_val in conn.extra_dejson.items():
if arg_name in ['sslmode', 'sslcert', 'sslkey', 'sslrootcert', 'sslcrl']:
conn_args[arg_name] = arg_val
psycopg2_conn = psycopg2.connect(**conn_args)
if psycopg2_conn.server_version < 70400:
self.supports_autocommit = True
return psycopg2_conn
|
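The server_version < 70400 check above works because psycopg2 exposes the server version in libpq's integer encoding — for pre-10 servers, major*10000 + minor*100 + patch, so PostgreSQL 7.4.0 is 70400. A sanity sketch, assuming a reachable database (the DSN is a placeholder):

import psycopg2

conn = psycopg2.connect("dbname=test")  # placeholder DSN, adjust for your setup
v = conn.server_version
print(v, v < 70400)  # e.g. 90605 means 9.6.5, so the check is False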
9139a2efc445887f59b99052f1ffd05c98ee2c72
|
tests/test_reporter.py
|
tests/test_reporter.py
|
"""Test the Reporter base class."""
import pytest
@pytest.fixture
def klass():
"""Return the CUT."""
from agile_analytics.reporters import Reporter
return Reporter
def test_klass(klass):
"""Ensure the CUT exists."""
assert klass
@pytest.fixture
def instance(klass, days_ago):
"""Return a pre-init'd CUT."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
return k
def test_init(klass, days_ago):
"""Verify we can init it correctly."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert k
assert k.start_date == a_month_ago
assert k.end_date == now
def test_valid_start_date(klass, days_ago):
"""Verify valid_start_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert now == k.valid_start_date(now)
def test_valid_end_date(klass, days_ago):
"""Verify valid_end_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert a_month_ago == k.valid_end_date(a_month_ago)
def test_filter_issues(instance):
"""Verify that filter_issues raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.filter_issues([])
def test_report_on(instance):
"""Verify that report_on raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.report_on([])
|
Add tests for base class.
|
Add tests for base class.
|
Python
|
mit
|
cmheisel/agile-analytics
|
Add tests for base class.
|
"""Test the Reporter base class."""
import pytest
@pytest.fixture
def klass():
"""Return the CUT."""
from agile_analytics.reporters import Reporter
return Reporter
def test_klass(klass):
"""Ensure the CUT exists."""
assert klass
@pytest.fixture
def instance(klass, days_ago):
"""Return a pre-init'd CUT."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
return k
def test_init(klass, days_ago):
"""Verify we can init it correctly."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert k
assert k.start_date == a_month_ago
assert k.end_date == now
def test_valid_start_date(klass, days_ago):
"""Verify valid_start_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert now == k.valid_start_date(now)
def test_valid_end_date(klass, days_ago):
"""Verify valid_end_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert a_month_ago == k.valid_end_date(a_month_ago)
def test_filter_issues(instance):
"""Verify that filter_issues raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.filter_issues([])
def test_report_on(instance):
"""Verify that report_on raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.report_on([])
|
<commit_before><commit_msg>Add tests for base class.<commit_after>
|
"""Test the Reporter base class."""
import pytest
@pytest.fixture
def klass():
"""Return the CUT."""
from agile_analytics.reporters import Reporter
return Reporter
def test_klass(klass):
"""Ensure the CUT exists."""
assert klass
@pytest.fixture
def instance(klass, days_ago):
"""Return a pre-init'd CUT."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
return k
def test_init(klass, days_ago):
"""Verify we can init it correctly."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert k
assert k.start_date == a_month_ago
assert k.end_date == now
def test_valid_start_date(klass, days_ago):
"""Verify valid_start_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert now == k.valid_start_date(now)
def test_valid_end_date(klass, days_ago):
"""Verify valid_end_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert a_month_ago == k.valid_end_date(a_month_ago)
def test_filter_issues(instance):
"""Verify that filter_issues raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.filter_issues([])
def test_report_on(instance):
"""Verify that report_on raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.report_on([])
|
Add tests for base class."""Test the Reporter base class."""
import pytest
@pytest.fixture
def klass():
"""Return the CUT."""
from agile_analytics.reporters import Reporter
return Reporter
def test_klass(klass):
"""Ensure the CUT exists."""
assert klass
@pytest.fixture
def instance(klass, days_ago):
"""Return a pre-init'd CUT."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
return k
def test_init(klass, days_ago):
"""Verify we can init it correctly."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert k
assert k.start_date == a_month_ago
assert k.end_date == now
def test_valid_start_date(klass, days_ago):
"""Verify valid_start_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert now == k.valid_start_date(now)
def test_valid_end_date(klass, days_ago):
"""Verify valid_end_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert a_month_ago == k.valid_end_date(a_month_ago)
def test_filter_issues(instance):
"""Verify that filter_issues raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.filter_issues([])
def test_report_on(instance):
"""Verify that report_on raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.report_on([])
|
<commit_before><commit_msg>Add tests for base class.<commit_after>"""Test the Reporter base class."""
import pytest
@pytest.fixture
def klass():
"""Return the CUT."""
from agile_analytics.reporters import Reporter
return Reporter
def test_klass(klass):
"""Ensure the CUT exists."""
assert klass
@pytest.fixture
def instance(klass, days_ago):
"""Return a pre-init'd CUT."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
return k
def test_init(klass, days_ago):
"""Verify we can init it correctly."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert k
assert k.start_date == a_month_ago
assert k.end_date == now
def test_valid_start_date(klass, days_ago):
"""Verify valid_start_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert now == k.valid_start_date(now)
def test_valid_end_date(klass, days_ago):
"""Verify valid_end_date returns whatever is passed."""
now = days_ago(0)
a_month_ago = days_ago(30)
k = klass(title="Foo", start_date=a_month_ago, end_date=now)
assert a_month_ago == k.valid_end_date(a_month_ago)
def test_filter_issues(instance):
"""Verify that filter_issues raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.filter_issues([])
def test_report_on(instance):
"""Verify that report_on raises NotImplementedError."""
with pytest.raises(NotImplementedError):
instance.report_on([])
|
|
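The tests above pin down an abstract-base contract: dates pass through unchanged and the two work methods raise. A minimal stand-in that would satisfy them looks roughly like this (a sketch only; the real Reporter in agile_analytics may differ):

class Reporter(object):
    """Base reporter; subclasses implement filter_issues/report_on."""

    def __init__(self, title, start_date=None, end_date=None):
        self.title = title
        self.start_date = self.valid_start_date(start_date)
        self.end_date = self.valid_end_date(end_date)

    def valid_start_date(self, date):
        return date  # the base class accepts any date unchanged

    def valid_end_date(self, date):
        return date

    def filter_issues(self, issues):
        raise NotImplementedError

    def report_on(self, issues):
        raise NotImplementedError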
907b626743e0a0a48bc47580f269618efdb4b399
|
skidl/logger.py
|
skidl/logger.py
|
# -*- coding: utf-8 -*-
# MIT license
#
# Copyright (C) 2019 by XESS Corp.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Logging for generic messages and ERC.
"""
import logging
import sys
from .utilities import get_script_name
class CountCalls(object):
"""Decorator for counting the number of times a function is called.
This is used for counting errors and warnings passed to logging functions,
making it easy to track if and how many errors/warnings were issued.
"""
def __init__(self, func):
self.func = func
self.count = 0
def __call__(self, *args, **kwargs):
self.count += 1
return self.func(*args, **kwargs)
def reset(self):
self.count = 0
def _create_logger(title, log_msg_id="", log_file_suffix=".log"):
"""
Create a logger, usually for run-time errors or ERC violations.
"""
logger = logging.getLogger(title)
# Errors & warnings always appear on the terminal.
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Errors and warnings are stored in a log file with the top-level script's name.
handler = logging.StreamHandler(open(get_script_name() + log_file_suffix, "w"))
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Set logger to trigger on info, warning, and error messages.
logger.setLevel(logging.INFO)
# Augment the logger's functions to count the number of errors and warnings.
logger.error = CountCalls(logger.error)
logger.warning = CountCalls(logger.warning)
return logger
###############################################################################
# Set up loggers for runtime messages and ERC reports.
logger = _create_logger("skidl")
erc_logger = _create_logger("ERC_Logger", "ERC ", ".erc")
###############################################################################
|
Split logging functions from utilities.py into its own file.
|
Split logging functions from utilities.py into its own file.
|
Python
|
mit
|
xesscorp/skidl,xesscorp/skidl
|
Split logging functions from utilities.py into its own file.
|
# -*- coding: utf-8 -*-
# MIT license
#
# Copyright (C) 2019 by XESS Corp.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Logging for generic messages and ERC.
"""
import logging
import sys
from .utilities import get_script_name
class CountCalls(object):
"""Decorator for counting the number of times a function is called.
This is used for counting errors and warnings passed to logging functions,
making it easy to track if and how many errors/warnings were issued.
"""
def __init__(self, func):
self.func = func
self.count = 0
def __call__(self, *args, **kwargs):
self.count += 1
return self.func(*args, **kwargs)
def reset(self):
self.count = 0
def _create_logger(title, log_msg_id="", log_file_suffix=".log"):
"""
Create a logger, usually for run-time errors or ERC violations.
"""
logger = logging.getLogger(title)
# Errors & warnings always appear on the terminal.
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Errors and warnings are stored in a log file with the top-level script's name.
handler = logging.StreamHandler(open(get_script_name() + log_file_suffix, "w"))
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Set logger to trigger on info, warning, and error messages.
logger.setLevel(logging.INFO)
# Augment the logger's functions to count the number of errors and warnings.
logger.error = CountCalls(logger.error)
logger.warning = CountCalls(logger.warning)
return logger
###############################################################################
# Set up loggers for runtime messages and ERC reports.
logger = _create_logger("skidl")
erc_logger = _create_logger("ERC_Logger", "ERC ", ".erc")
###############################################################################
|
<commit_before><commit_msg>Split logging functions from utilities.py into its own file.<commit_after>
|
# -*- coding: utf-8 -*-
# MIT license
#
# Copyright (C) 2019 by XESS Corp.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Logging for generic messages and ERC.
"""
import logging
import sys
from .utilities import get_script_name
class CountCalls(object):
"""Decorator for counting the number of times a function is called.
This is used for counting errors and warnings passed to logging functions,
making it easy to track if and how many errors/warnings were issued.
"""
def __init__(self, func):
self.func = func
self.count = 0
def __call__(self, *args, **kwargs):
self.count += 1
return self.func(*args, **kwargs)
def reset(self):
self.count = 0
def _create_logger(title, log_msg_id="", log_file_suffix=".log"):
"""
Create a logger, usually for run-time errors or ERC violations.
"""
logger = logging.getLogger(title)
# Errors & warnings always appear on the terminal.
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Errors and warnings are stored in a log file with the top-level script's name.
handler = logging.StreamHandler(open(get_script_name() + log_file_suffix, "w"))
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Set logger to trigger on info, warning, and error messages.
logger.setLevel(logging.INFO)
# Augment the logger's functions to count the number of errors and warnings.
logger.error = CountCalls(logger.error)
logger.warning = CountCalls(logger.warning)
return logger
###############################################################################
# Set up loggers for runtime messages and ERC reports.
logger = _create_logger("skidl")
erc_logger = _create_logger("ERC_Logger", "ERC ", ".erc")
###############################################################################
|
Split logging functions from utilities.py into its own file.# -*- coding: utf-8 -*-
# MIT license
#
# Copyright (C) 2019 by XESS Corp.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Logging for generic messages and ERC.
"""
import logging
import sys
from .utilities import get_script_name
class CountCalls(object):
"""Decorator for counting the number of times a function is called.
This is used for counting errors and warnings passed to logging functions,
making it easy to track if and how many errors/warnings were issued.
"""
def __init__(self, func):
self.func = func
self.count = 0
def __call__(self, *args, **kwargs):
self.count += 1
return self.func(*args, **kwargs)
def reset(self):
self.count = 0
def _create_logger(title, log_msg_id="", log_file_suffix=".log"):
"""
Create a logger, usually for run-time errors or ERC violations.
"""
logger = logging.getLogger(title)
# Errors & warnings always appear on the terminal.
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Errors and warnings are stored in a log file with the top-level script's name.
handler = logging.StreamHandler(open(get_script_name() + log_file_suffix, "w"))
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Set logger to trigger on info, warning, and error messages.
logger.setLevel(logging.INFO)
# Augment the logger's functions to count the number of errors and warnings.
logger.error = CountCalls(logger.error)
logger.warning = CountCalls(logger.warning)
return logger
###############################################################################
# Set up loggers for runtime messages and ERC reports.
logger = _create_logger("skidl")
erc_logger = _create_logger("ERC_Logger", "ERC ", ".erc")
###############################################################################
|
<commit_before><commit_msg>Split logging functions from utilities.py into its own file.<commit_after># -*- coding: utf-8 -*-
# MIT license
#
# Copyright (C) 2019 by XESS Corp.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Logging for generic messages and ERC.
"""
import logging
import sys
from .utilities import get_script_name
class CountCalls(object):
"""Decorator for counting the number of times a function is called.
This is used for counting errors and warnings passed to logging functions,
making it easy to track if and how many errors/warnings were issued.
"""
def __init__(self, func):
self.func = func
self.count = 0
def __call__(self, *args, **kwargs):
self.count += 1
return self.func(*args, **kwargs)
def reset(self):
self.count = 0
def _create_logger(title, log_msg_id="", log_file_suffix=".log"):
"""
Create a logger, usually for run-time errors or ERC violations.
"""
logger = logging.getLogger(title)
# Errors & warnings always appear on the terminal.
handler = logging.StreamHandler(sys.stderr)
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Errors and warnings are stored in a log file with the top-level script's name.
handler = logging.StreamHandler(open(get_script_name() + log_file_suffix, "w"))
handler.setLevel(logging.WARNING)
handler.setFormatter(logging.Formatter(log_msg_id + "%(levelname)s: %(message)s"))
logger.addHandler(handler)
# Set logger to trigger on info, warning, and error messages.
logger.setLevel(logging.INFO)
# Augment the logger's functions to count the number of errors and warnings.
logger.error = CountCalls(logger.error)
logger.warning = CountCalls(logger.warning)
return logger
###############################################################################
# Set up loggers for runtime messages and ERC reports.
logger = _create_logger("skidl")
erc_logger = _create_logger("ERC_Logger", "ERC ", ".erc")
###############################################################################
|
|
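A quick illustration of the CountCalls wrapper above, outside the logging context (the class body is repeated so the snippet runs on its own):

class CountCalls(object):  # same decorator as in the record above
    def __init__(self, func):
        self.func = func
        self.count = 0

    def __call__(self, *args, **kwargs):
        self.count += 1
        return self.func(*args, **kwargs)

    def reset(self):
        self.count = 0

def greet(name):
    return "hello, %s" % name

greet = CountCalls(greet)  # wrap manually, as _create_logger does
greet("alice")
greet("bob")
print(greet.count)  # -> 2
greet.reset()
print(greet.count)  # -> 0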
d7dc4142957cb81fa864677aa0a5b9d37b615778
|
download.py
|
download.py
|
import csv
import json
import sys
import tqdm
from cordis_scraper import get_project
dataset_path = sys.argv[1]
output_path = sys.argv[2]
with open(dataset_path, 'r') as dataset_file:
record_count = sum(1 for line in dataset_file)
dataset_file = open(dataset_path, 'r')
output_path = open(output_path, 'w')
dataset_reader = csv.reader(dataset_file, delimiter=';')
next(dataset_reader) # dump header
for row in tqdm.tqdm(dataset_reader, total=record_count):
rcn = row[0]
project = get_project(rcn)
project['rcn'] = rcn
json_line = json.dumps(project)
output_path.write(json_line + '\n')
dataset_file.close()
output_path.close()
|
Write a script to get the details for all records in the H2020 dump
|
Write a script to get the details for all records in the H2020 dump
|
Python
|
mit
|
oersted/cordis-scraper
|
Write a script to get the details for all records in the H2020 dump
|
import csv
import json
import sys
import tqdm
from cordis_scraper import get_project
dataset_path = sys.argv[1]
output_path = sys.argv[2]
with open(dataset_path, 'r') as dataset_file:
record_count = sum(1 for line in dataset_file)
dataset_file = open(dataset_path, 'r')
output_path = open(output_path, 'w')
dataset_reader = csv.reader(dataset_file, delimiter=';')
next(dataset_reader) # dump header
for row in tqdm.tqdm(dataset_reader, total=record_count):
rcn = row[0]
project = get_project(rcn)
project['rcn'] = rcn
json_line = json.dumps(project)
output_path.write(json_line + '\n')
dataset_file.close()
output_path.close()
|
<commit_before><commit_msg>Write a script to get the details for all records in the H2020 dump<commit_after>
|
import csv
import json
import sys
import tqdm
from cordis_scraper import get_project
dataset_path = sys.argv[1]
output_path = sys.argv[2]
with open(dataset_path, 'r') as dataset_file:
record_count = sum(1 for line in dataset_file)
dataset_file = open(dataset_path, 'r')
output_path = open(output_path, 'w')
dataset_reader = csv.reader(dataset_file, delimiter=';')
next(dataset_reader) # dump header
for row in tqdm.tqdm(dataset_reader, total=record_count):
rcn = row[0]
project = get_project(rcn)
project['rcn'] = rcn
json_line = json.dumps(project)
output_path.write(json_line + '\n')
dataset_file.close()
output_path.close()
|
Write a script to get the details for all records in the H2020 dumpimport csv
import json
import sys
import tqdm
from cordis_scraper import get_project
dataset_path = sys.argv[1]
output_path = sys.argv[2]
with open(dataset_path, 'r') as dataset_file:
record_count = sum(1 for line in dataset_file)
dataset_file = open(dataset_path, 'r')
output_path = open(output_path, 'w')
dataset_reader = csv.reader(dataset_file, delimiter=';')
next(dataset_reader) # dump header
for row in tqdm.tqdm(dataset_reader, total=record_count):
rcn = row[0]
project = get_project(rcn)
project['rcn'] = rcn
json_line = json.dumps(project)
output_path.write(json_line + '\n')
dataset_file.close()
output_path.close()
|
<commit_before><commit_msg>Write a script to get the details for all records in the H2020 dump<commit_after>import csv
import json
import sys
import tqdm
from cordis_scraper import get_project
dataset_path = sys.argv[1]
output_path = sys.argv[2]
with open(dataset_path, 'r') as dataset_file:
record_count = sum(1 for line in dataset_file)
dataset_file = open(dataset_path, 'r')
output_path = open(output_path, 'w')
dataset_reader = csv.reader(dataset_file, delimiter=';')
next(dataset_reader) # dump header
for row in tqdm.tqdm(dataset_reader, total=record_count):
rcn = row[0]
project = get_project(rcn)
project['rcn'] = rcn
json_line = json.dumps(project)
output_path.write(json_line + '\n')
dataset_file.close()
output_path.close()
|
|
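The script above emits JSON Lines — one JSON object per input row. Reading the result back is symmetric; a sketch, with projects.jsonl standing in for whatever output path was passed on the command line:

import json

with open("projects.jsonl") as f:  # placeholder for the script's output file
    projects = [json.loads(line) for line in f]
print(projects[0]["rcn"])  # each record carries the RCN it was fetched with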
df89f96113d73017a9e18964bfd456b06a2e2a6d
|
jsk_apc2015_common/scripts/create_mask_applied_dataset.py
|
jsk_apc2015_common/scripts/create_mask_applied_dataset.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import re
import cv2
from jsk_recognition_utils import bounding_rect_of_mask
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
args = parser.parse_args()
container_path = args.container_path
output_dir = os.path.abspath(container_path + '_mask_applied')
if not os.path.exists(output_dir):
print('creating output directory: {}'.format(output_dir))
os.mkdir(output_dir)
categs = os.listdir(container_path)
os.chdir(container_path)
for categ in categs:
os.chdir(categ)
print('processing category: {}'.format(categ))
files = os.listdir('.')
    img_files = list(filter(lambda x: re.match(r'^N\d*?_\d*?\.jpg', x), files))
print('found {} images'.format(len(img_files)))
categ_output_dir = os.path.join(output_dir, categ)
if not os.path.exists(categ_output_dir):
os.mkdir(categ_output_dir)
for img_file in img_files:
base, _ = os.path.splitext(img_file)
mask_file = os.path.join('masks', base + '_mask.pbm')
img = cv2.imread(img_file)
mask = cv2.imread(mask_file, 0)
applied = bounding_rect_of_mask(img, ~mask)
cv2.imwrite(os.path.join(output_dir, categ, img_file), applied)
os.chdir('..')
os.chdir('..')
|
Add script to create mask applied dataset
|
Add script to create mask applied dataset
|
Python
|
bsd-3-clause
|
pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc,pazeshun/jsk_apc
|
Add script to create mask applied dataset
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import re
import cv2
from jsk_recognition_utils import bounding_rect_of_mask
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
args = parser.parse_args()
container_path = args.container_path
output_dir = os.path.abspath(container_path + '_mask_applied')
if not os.path.exists(output_dir):
print('creating output directory: {}'.format(output_dir))
os.mkdir(output_dir)
categs = os.listdir(container_path)
os.chdir(container_path)
for categ in categs:
os.chdir(categ)
print('processing category: {}'.format(categ))
files = os.listdir('.')
    img_files = list(filter(lambda x: re.match(r'^N\d*?_\d*?\.jpg', x), files))
print('found {} images'.format(len(img_files)))
categ_output_dir = os.path.join(output_dir, categ)
if not os.path.exists(categ_output_dir):
os.mkdir(categ_output_dir)
for img_file in img_files:
base, _ = os.path.splitext(img_file)
mask_file = os.path.join('masks', base + '_mask.pbm')
img = cv2.imread(img_file)
mask = cv2.imread(mask_file, 0)
applied = bounding_rect_of_mask(img, ~mask)
cv2.imwrite(os.path.join(output_dir, categ, img_file), applied)
os.chdir('..')
os.chdir('..')
|
<commit_before><commit_msg>Add script to create mask applied dataset<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import re
import cv2
from jsk_recognition_utils import bounding_rect_of_mask
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
args = parser.parse_args()
container_path = args.container_path
output_dir = os.path.abspath(container_path + '_mask_applied')
if not os.path.exists(output_dir):
print('creating output directory: {}'.format(output_dir))
os.mkdir(output_dir)
categs = os.listdir(container_path)
os.chdir(container_path)
for categ in categs:
os.chdir(categ)
print('processing category: {}'.format(categ))
files = os.listdir('.')
    img_files = list(filter(lambda x: re.match(r'^N\d*?_\d*?\.jpg', x), files))
print('found {} images'.format(len(img_files)))
categ_output_dir = os.path.join(output_dir, categ)
if not os.path.exists(categ_output_dir):
os.mkdir(categ_output_dir)
for img_file in img_files:
base, _ = os.path.splitext(img_file)
mask_file = os.path.join('masks', base + '_mask.pbm')
img = cv2.imread(img_file)
mask = cv2.imread(mask_file, 0)
applied = bounding_rect_of_mask(img, ~mask)
cv2.imwrite(os.path.join(output_dir, categ, img_file), applied)
os.chdir('..')
os.chdir('..')
|
Add script to create mask applied dataset#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import re
import cv2
from jsk_recognition_utils import bounding_rect_of_mask
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
args = parser.parse_args()
container_path = args.container_path
output_dir = os.path.abspath(container_path + '_mask_applied')
if not os.path.exists(output_dir):
print('creating output directory: {}'.format(output_dir))
os.mkdir(output_dir)
categs = os.listdir(container_path)
os.chdir(container_path)
for categ in categs:
os.chdir(categ)
print('processing category: {}'.format(categ))
files = os.listdir('.')
    img_files = list(filter(lambda x: re.match(r'^N\d*?_\d*?\.jpg', x), files))
print('found {} images'.format(len(img_files)))
categ_output_dir = os.path.join(output_dir, categ)
if not os.path.exists(categ_output_dir):
os.mkdir(categ_output_dir)
for img_file in img_files:
base, _ = os.path.splitext(img_file)
mask_file = os.path.join('masks', base + '_mask.pbm')
img = cv2.imread(img_file)
mask = cv2.imread(mask_file, 0)
applied = bounding_rect_of_mask(img, ~mask)
cv2.imwrite(os.path.join(output_dir, categ, img_file), applied)
os.chdir('..')
os.chdir('..')
|
<commit_before><commit_msg>Add script to create mask applied dataset<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import os
import re
import cv2
from jsk_recognition_utils import bounding_rect_of_mask
parser = argparse.ArgumentParser()
parser.add_argument('container_path')
args = parser.parse_args()
container_path = args.container_path
output_dir = os.path.abspath(container_path + '_mask_applied')
if not os.path.exists(output_dir):
print('creating output directory: {}'.format(output_dir))
os.mkdir(output_dir)
categs = os.listdir(container_path)
os.chdir(container_path)
for categ in categs:
os.chdir(categ)
print('processing category: {}'.format(categ))
files = os.listdir('.')
    img_files = list(filter(lambda x: re.match(r'^N\d*?_\d*?\.jpg', x), files))
print('found {} images'.format(len(img_files)))
categ_output_dir = os.path.join(output_dir, categ)
if not os.path.exists(categ_output_dir):
os.mkdir(categ_output_dir)
for img_file in img_files:
base, _ = os.path.splitext(img_file)
mask_file = os.path.join('masks', base + '_mask.pbm')
img = cv2.imread(img_file)
mask = cv2.imread(mask_file, 0)
applied = bounding_rect_of_mask(img, ~mask)
cv2.imwrite(os.path.join(output_dir, categ, img_file), applied)
os.chdir('..')
os.chdir('..')
|
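For readers without jsk_recognition_utils at hand: judging from its use in the record above, bounding_rect_of_mask appears to crop the image to the bounding rectangle of the mask's non-zero pixels (the script passes ~mask, i.e. the inverted mask). A rough stand-in under that assumption — the real helper may do more, such as blanking pixels outside the mask:

import cv2
import numpy as np

def bounding_rect_of_mask_approx(img, mask):
    # Crop img to the bounding rectangle of mask's non-zero pixels.
    points = cv2.findNonZero((mask > 0).astype(np.uint8))
    if points is None:  # empty mask: nothing to crop
        return img
    x, y, w, h = cv2.boundingRect(points)
    return img[y:y + h, x:x + w]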
|
aecb39860f8fcc20471c4b1b1b959e9be7e42d70
|
euler030.py
|
euler030.py
|
#!/usr/bin/python
power_sum = 0
# Upper limit is a 6-digit number
for i in range(2, (9 ** 5) * 6):
# I'm loving Python sugar
if sum(int(j) ** 5 for j in str(i)) == i:
# If you want to see the numbers
# print(i)
power_sum += i
print(power_sum)
|
Add solution for problem 30
|
Add solution for problem 30
|
Python
|
mit
|
cifvts/PyEuler
|
Add solution for problem 30
|
#!/usr/bin/python
power_sum = 0
# Upper limit is a 6-digit number
for i in range(2, (9 ** 5) * 6):
# I'm loving Python sugar
if sum(int(j) ** 5 for j in str(i)) == i:
# If you want to see the numbers
# print(i)
power_sum += i
print(power_sum)
|
<commit_before><commit_msg>Add solution for problem 30<commit_after>
|
#!/usr/bin/python
power_sum = 0
# Upper limit is a 6-digit number
for i in range(2, (9 ** 5) * 6):
# I'm loving Python sugar
if sum(int(j) ** 5 for j in str(i)) == i:
# If you want to see the numbers
# print(i)
power_sum += i
print(power_sum)
|
Add solution for problem 30#!/usr/bin/python
power_sum = 0
# Upper limit is a 6-digit number
for i in range(2, (9 ** 5) * 6):
# I'm loving Python sugar
if sum(int(j) ** 5 for j in str(i)) == i:
# If you want to see the numbers
# print(i)
power_sum += i
print(power_sum)
|
<commit_before><commit_msg>Add solution for problem 30<commit_after>#!/usr/bin/python
power_sum = 0
# Upper limit is a 6-digit number
for i in range(2, (9 ** 5) * 6):
# I'm loving Python sugar
if sum(int(j) ** 5 for j in str(i)) == i:
# If you want to see the numbers
# print(i)
power_sum += i
print(power_sum)
|
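Why (9 ** 5) * 6 is a safe upper bound in the record above: a d-digit number is at least 10 ** (d - 1), while the sum of the fifth powers of its digits is at most d * 9 ** 5. For d = 7 that maximum is 413343 < 1000000, so no number with seven or more digits can equal its digit fifth-power sum. A quick check of the same reasoning:

# For which digit counts d can the digit fifth-power sum still reach the number?
for d in range(1, 9):
    max_power_sum = d * 9 ** 5          # largest possible digit sum
    smallest_d_digit = 10 ** (d - 1)    # smallest d-digit number
    print(d, max_power_sum, smallest_d_digit, max_power_sum >= smallest_d_digit)
# True only up to d = 6, so scanning to 6 * 9**5 = 354294 covers every candidate.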
|
f5253c7f458a5ce4390b5e967f45bd2f0b9a1de2
|
dosagelib/__init__.py
|
dosagelib/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
pass
|
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
out.warn('{} is not installed, no version available.'
' Use at least {!r} or {!r} to fix this.'.format(
AppName, 'pip install -e .', 'setup.py egg_info'))
__version__ = 'ERR.NOT.INSTALLED'
|
Return a fallback "version" if dosage is not installed
|
Return a fallback "version" if dosage is not installed
Additionally, inform the user on how to fix the problem. Thanks to twb
for noticing this.
|
Python
|
mit
|
webcomics/dosage,peterjanes/dosage,peterjanes/dosage,webcomics/dosage
|
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
pass
Return a fallback "version" if dosage is not installed
Additionally, inform the user on how to fix the problem. Thanks to twb
for noticing this.
|
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
out.warn('{} is not installed, no version available.'
' Use at least {!r} or {!r} to fix this.'.format(
AppName, 'pip install -e .', 'setup.py egg_info'))
__version__ = 'ERR.NOT.INSTALLED'
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
pass
<commit_msg>Return a fallback "version" if dosage is not installed
Additionally, inform the user on how to fix the problem. Thanks to twb
for noticing this.<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
out.warn('{} is not installed, no version available.'
' Use at least {!r} or {!r} to fix this.'.format(
AppName, 'pip install -e .', 'setup.py egg_info'))
__version__ = 'ERR.NOT.INSTALLED'
|
# -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
pass
Return a fallback "version" if dosage is not installed
Additionally, inform the user on how to fix the problem. Thanks to twb
for noticing this.# -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
out.warn('{} is not installed, no version available.'
' Use at least {!r} or {!r} to fix this.'.format(
AppName, 'pip install -e .', 'setup.py egg_info'))
__version__ = 'ERR.NOT.INSTALLED'
|
<commit_before># -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
pass
<commit_msg>Return a fallback "version" if dosage is not installed
Additionally, inform the user on how to fix the problem. Thanks to twb
for noticing this.<commit_after># -*- coding: utf-8 -*-
# Copyright (C) 2004-2008 Tristan Seligmann and Jonathan Jacobs
# Copyright (C) 2012-2014 Bastian Kleineidam
# Copyright (C) 2015-2019 Tobias Gruetzmacher
"""
Automated comic downloader. Dosage traverses comic websites in
order to download each strip of the comic. The intended use is for
mirroring the strips locally for ease of viewing; redistribution of the
downloaded strips may violate copyright, and is not advisable unless you
have communicated with all of the relevant copyright holders, described
your intentions, and received permission to distribute.
The primary interface is the 'dosage' commandline script.
Comic modules for each comic are located in L{dosagelib.plugins}.
"""
from __future__ import absolute_import, division, print_function
try:
from importlib.metadata import version, PackageNotFoundError
except ImportError:
from importlib_metadata import version, PackageNotFoundError
from .output import out
AppName = u'dosage'
try:
__version__ = version(AppName) # PEP 396
except PackageNotFoundError:
# package is not installed
out.warn('{} is not installed, no version available.'
' Use at least {!r} or {!r} to fix this.'.format(
AppName, 'pip install -e .', 'setup.py egg_info'))
__version__ = 'ERR.NOT.INSTALLED'
|
aad797bd6ad195c45920f615b898793ce6655e37
|
python/ember/examples/example_extinction.py
|
python/ember/examples/example_extinction.py
|
#!/usr/bin/python
"""
A steady flame is established at a starting strain rate far from extinction.
Then the strain rate parameter (a) is systematically increased until a steady
flame can no longer be attained. The progression to extinction is summarized
in a plot of maximum temperature vs strain rate. Twin disc flame geometry.
"""
from ember import *
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import multiprocessing
import numpy as np
import os
import shutil
output = 'run/ex_extinction'
conf = Config(
Paths(outputDir=output),
# Chemistry(mechanismFile='gri30.cti'),
General(twinFlame=True,
flameGeometry='disc',
unburnedLeft=False,
fixedLeftLocation=True,
nThreads=multiprocessing.cpu_count()//2,
chemistryIntegrator='qss' # default = 'qss' other option = 'cvode'
),
InitialCondition(fuel='CH4:1.0',
oxidizer='N2:7.52, O2:2.0',
equivalenceRatio=0.7,
Tu=298.0,
xLeft=0.0,
xRight=0.020,
),
Grid(
        gridMin=1E-8, # Default = 5E-7
centerGridMin=1E-8, # Default = 1E-4
),
Extinction(
method='step', # default = 'step' other option = 'factor'
initialStep=75.0,
minStep=0.5,
# initialFactor=1.05,
# minFactor=1.0001,
reductionFactor=0.4,
cutoffTemp=1500.0,
initialStrainRate=500,
)
)
if __name__ == '__main__':
if os.path.exists(output):
shutil.rmtree(output)
conf.runESR()
data = np.genfromtxt(output+'/extProfile.csv', skip_header=1, delimiter=',')
plt.figure()
plt.xlabel('Strain Rate [1/s]')
plt.ylabel('Max. Temp. [K]')
plt.semilogx(data[:,0], data[:,1])
plt.savefig(output+'/progression.png')
# plt.show()
plt.close()
|
Add extinction strain rate example
|
Add extinction strain rate example
|
Python
|
mit
|
speth/ember,speth/ember,speth/ember
|
Add extinction strain rate example
|
#!/usr/bin/python
"""
A steady flame is established at a starting strain rate far from extinction.
Then the strain rate parameter (a) is systematically increased until a steady
flame can no longer be attained. The progression to extinction is summarized
in a plot of maximum temperature vs strain rate. Twin disc flame geometry.
"""
from ember import *
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import multiprocessing
import numpy as np
import os
import shutil
output = 'run/ex_extinction'
conf = Config(
Paths(outputDir=output),
# Chemistry(mechanismFile='gri30.cti'),
General(twinFlame=True,
flameGeometry='disc',
unburnedLeft=False,
fixedLeftLocation=True,
nThreads=multiprocessing.cpu_count()//2,
chemistryIntegrator='qss' # default = 'qss' other option = 'cvode'
),
InitialCondition(fuel='CH4:1.0',
oxidizer='N2:7.52, O2:2.0',
equivalenceRatio=0.7,
Tu=298.0,
xLeft=0.0,
xRight=0.020,
),
Grid(
        gridMin=1E-8, # Default = 5E-7
centerGridMin=1E-8, # Default = 1E-4
),
Extinction(
method='step', # default = 'step' other option = 'factor'
initialStep=75.0,
minStep=0.5,
# initialFactor=1.05,
# minFactor=1.0001,
reductionFactor=0.4,
cutoffTemp=1500.0,
initialStrainRate=500,
)
)
if __name__ == '__main__':
if os.path.exists(output):
shutil.rmtree(output)
conf.runESR()
data = np.genfromtxt(output+'/extProfile.csv', skip_header=1, delimiter=',')
plt.figure()
plt.xlabel('Strain Rate [1/s]')
plt.ylabel('Max. Temp. [K]')
plt.semilogx(data[:,0], data[:,1])
plt.savefig(output+'/progression.png')
# plt.show()
plt.close()
|
<commit_before><commit_msg>Add extinction strain rate example<commit_after>
|
#!/usr/bin/python
"""
A steady flame is established at a starting strain rate far from extinction.
Then the strain rate parameter (a) is systematically increased until a steady
flame can no longer be attained. The progression to extinction is summarized
in a plot of maximum temperature vs strain rate. Twin disc flame geometry.
"""
from ember import *
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import multiprocessing
import numpy as np
import os
import shutil
output = 'run/ex_extinction'
conf = Config(
Paths(outputDir=output),
# Chemistry(mechanismFile='gri30.cti'),
General(twinFlame=True,
flameGeometry='disc',
unburnedLeft=False,
fixedLeftLocation=True,
nThreads=multiprocessing.cpu_count()//2,
chemistryIntegrator='qss' # default = 'qss' other option = 'cvode'
),
InitialCondition(fuel='CH4:1.0',
oxidizer='N2:7.52, O2:2.0',
equivalenceRatio=0.7,
Tu=298.0,
xLeft=0.0,
xRight=0.020,
),
Grid(
        gridMin=1E-8, # Default = 5E-7
centerGridMin=1E-8, # Default = 1E-4
),
Extinction(
method='step', # default = 'step' other option = 'factor'
initialStep=75.0,
minStep=0.5,
# initialFactor=1.05,
# minFactor=1.0001,
reductionFactor=0.4,
cutoffTemp=1500.0,
initialStrainRate=500,
)
)
if __name__ == '__main__':
if os.path.exists(output):
shutil.rmtree(output)
conf.runESR()
data = np.genfromtxt(output+'/extProfile.csv', skip_header=1, delimiter=',')
plt.figure()
plt.xlabel('Strain Rate [1/s]')
plt.ylabel('Max. Temp. [K]')
plt.semilogx(data[:,0], data[:,1])
plt.savefig(output+'/progression.png')
# plt.show()
plt.close()
|
Add extinction strain rate example#!/usr/bin/python
"""
A steady flame is established at a starting strain rate far from extinction.
Then the strain rate parameter (a) is systematically increased until a steady
flame can no longer be attained. The progression to extinction is summarized
in a plot of maximum temperature vs strain rate. Twin disc flame geometry.
"""
from ember import *
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import multiprocessing
import numpy as np
import os
import shutil
output = 'run/ex_extinction'
conf = Config(
Paths(outputDir=output),
# Chemistry(mechanismFile='gri30.cti'),
General(twinFlame=True,
flameGeometry='disc',
unburnedLeft=False,
fixedLeftLocation=True,
nThreads=multiprocessing.cpu_count()//2,
chemistryIntegrator='qss' # default = 'qss' other option = 'cvode'
),
InitialCondition(fuel='CH4:1.0',
oxidizer='N2:7.52, O2:2.0',
equivalenceRatio=0.7,
Tu=298.0,
xLeft=0.0,
xRight=0.020,
),
Grid(
        gridMin=1E-8, # Default = 5E-7
centerGridMin=1E-8, # Default = 1E-4
),
Extinction(
method='step', # default = 'step' other option = 'factor'
initialStep=75.0,
minStep=0.5,
# initialFactor=1.05,
# minFactor=1.0001,
reductionFactor=0.4,
cutoffTemp=1500.0,
initialStrainRate=500,
)
)
if __name__ == '__main__':
if os.path.exists(output):
shutil.rmtree(output)
conf.runESR()
data = np.genfromtxt(output+'/extProfile.csv', skip_header=1, delimiter=',')
plt.figure()
plt.xlabel('Strain Rate [1/s]')
plt.ylabel('Max. Temp. [K]')
plt.semilogx(data[:,0], data[:,1])
plt.savefig(output+'/progression.png')
# plt.show()
plt.close()
|
<commit_before><commit_msg>Add extinction strain rate example<commit_after>#!/usr/bin/python
"""
A steady flame is established at a starting strain rate far from extinction.
Then the strain rate parameter (a) is systematically increased until a steady
flame can no longer be attained. The progression to extinction is summarized
in a plot of maximum temperature vs strain rate. Twin disc flame geometry.
"""
from ember import *
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import multiprocessing
import numpy as np
import os
import shutil
output = 'run/ex_extinction'
conf = Config(
Paths(outputDir=output),
# Chemistry(mechanismFile='gri30.cti'),
General(twinFlame=True,
flameGeometry='disc',
unburnedLeft=False,
fixedLeftLocation=True,
nThreads=multiprocessing.cpu_count()//2,
chemistryIntegrator='qss' # default = 'qss' other option = 'cvode'
),
InitialCondition(fuel='CH4:1.0',
oxidizer='N2:7.52, O2:2.0',
equivalenceRatio=0.7,
Tu=298.0,
xLeft=0.0,
xRight=0.020,
),
Grid(
        gridMin=1E-8, # Default = 5E-7
centerGridMin=1E-8, # Default = 1E-4
),
Extinction(
method='step', # default = 'step' other option = 'factor'
initialStep=75.0,
minStep=0.5,
# initialFactor=1.05,
# minFactor=1.0001,
reductionFactor=0.4,
cutoffTemp=1500.0,
initialStrainRate=500,
)
)
if __name__ == '__main__':
if os.path.exists(output):
shutil.rmtree(output)
conf.runESR()
data = np.genfromtxt(output+'/extProfile.csv', skip_header=1, delimiter=',')
plt.figure()
plt.xlabel('Strain Rate [1/s]')
plt.ylabel('Max. Temp. [K]')
plt.semilogx(data[:,0], data[:,1])
plt.savefig(output+'/progression.png')
# plt.show()
plt.close()
|
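The Extinction options in the record above describe a step-then-refine search: raise the strain rate by initialStep while a steady flame persists, back off and shrink the step by reductionFactor once the maximum temperature drops below cutoffTemp, and stop when the step falls under minStep. A schematic of that progression — not Ember's actual implementation, and with a hypothetical is_steady(a) oracle standing in for a full solve:

def find_extinction_strain_rate(is_steady, a=500.0, step=75.0,
                                min_step=0.5, reduction_factor=0.4):
    # is_steady(a) is a made-up stand-in for running Ember to steady
    # state at strain rate a and checking max T against cutoffTemp.
    last_steady = None
    while step >= min_step:
        if is_steady(a):
            last_steady = a
            a += step                 # push closer to extinction
        else:
            a -= step                 # overshot: return to last steady point
            step *= reduction_factor  # refine the step
            a += step
    return last_steady                # strain rate of the last steady flame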
|
33a758710fb2949475c735e3d92f85237219bf1d
|
tests/qctests/test_qc_anomaly_detection.py
|
tests/qctests/test_qc_anomaly_detection.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Check anomaly detection QC test
"""
import numpy as np
from cotede.utils.supportdata import download_testdata
from cotede.qc import fProfileQC
def test():
""" Only test if run. Must improve this.
"""
datafile = download_testdata("dPIRX010.cnv")
pqc = fProfileQC(datafile, cfg='anomaly_detection')
assert sorted(np.unique(pqc.flags['TEMP']['anomaly_detection'])) == [1,4]
|
Test to apply anomaly detection Q.C.
|
Test to apply anomaly detection Q.C.
|
Python
|
bsd-3-clause
|
castelao/CoTeDe
|
Test to apply anomaly detection Q.C.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Check anomaly detection QC test
"""
import numpy as np
from cotede.utils.supportdata import download_testdata
from cotede.qc import fProfileQC
def test():
""" Only test if run. Must improve this.
"""
datafile = download_testdata("dPIRX010.cnv")
pqc = fProfileQC(datafile, cfg='anomaly_detection')
assert sorted(np.unique(pqc.flags['TEMP']['anomaly_detection'])) == [1,4]
|
<commit_before><commit_msg>Test to apply anomaly detection Q.C.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Check anomaly detection QC test
"""
import numpy as np
from cotede.utils.supportdata import download_testdata
from cotede.qc import fProfileQC
def test():
""" Only test if run. Must improve this.
"""
datafile = download_testdata("dPIRX010.cnv")
pqc = fProfileQC(datafile, cfg='anomaly_detection')
assert sorted(np.unique(pqc.flags['TEMP']['anomaly_detection'])) == [1,4]
|
Test to apply anomaly detection Q.C.#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Check anomaly detection QC test
"""
import numpy as np
from cotede.utils.supportdata import download_testdata
from cotede.qc import fProfileQC
def test():
""" Only test if run. Must improve this.
"""
datafile = download_testdata("dPIRX010.cnv")
pqc = fProfileQC(datafile, cfg='anomaly_detection')
assert sorted(np.unique(pqc.flags['TEMP']['anomaly_detection'])) == [1,4]
|
<commit_before><commit_msg>Test to apply anomaly detection Q.C.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" Check anomaly detection QC test
"""
import numpy as np
from cotede.utils.supportdata import download_testdata
from cotede.qc import fProfileQC
def test():
""" Only test if run. Must improve this.
"""
datafile = download_testdata("dPIRX010.cnv")
pqc = fProfileQC(datafile, cfg='anomaly_detection')
assert sorted(np.unique(pqc.flags['TEMP']['anomaly_detection'])) == [1,4]
|
|
a2e0d92eeb7e07cbad06368ccce22cc5d360ae55
|
examples/accel_i2c.py
|
examples/accel_i2c.py
|
# This is an example on how to access accelerometer on
# PyBoard directly using I2C bus. As such, it's more
# intended to be an I2C example, rather than accelerometer
# example. For the latter, using pyb.Accel class is
# much easier.
import pyb
import time
# Accelerometer needs to be powered on first. Even
# though signal is called "AVDD", and there's separate
# "DVDD", without AVDD, it won't event talk on I2C bus.
accel_pwr = pyb.Pin("MMA_AVDD")
accel_pwr.value(1)
i2c = pyb.I2C(1)
addrs = i2c.scan()
print("Scanning devices:", [hex(x) for x in addrs])
if 0x4c not in addrs:
print("Accelerometer is not detected")
ACCEL_ADDR = 0x4c
ACCEL_AXIS_X_REG = 0
ACCEL_MODE_REG = 7
# Now activate measurements
i2c.mem_write(b"\x01", ACCEL_ADDR, ACCEL_MODE_REG)
print("Try to move accelerometer and watch the values")
while True:
val = i2c.mem_read(1, ACCEL_ADDR, ACCEL_AXIS_X_REG)
print(val[0])
time.sleep(1)
|
Add example of I2C usage, taking PyBoard accelerometer as subject.
|
examples: Add example of I2C usage, taking PyBoard accelerometer as subject.
|
Python
|
mit
|
lowRISC/micropython,turbinenreiter/micropython,jmarcelino/pycom-micropython,drrk/micropython,mpalomer/micropython,puuu/micropython,puuu/micropython,dhylands/micropython,turbinenreiter/micropython,chrisdearman/micropython,AriZuu/micropython,EcmaXp/micropython,dmazzella/micropython,mpalomer/micropython,pfalcon/micropython,ChuckM/micropython,AriZuu/micropython,deshipu/micropython,trezor/micropython,adafruit/micropython,adafruit/micropython,trezor/micropython,jmarcelino/pycom-micropython,micropython/micropython-esp32,SHA2017-badge/micropython-esp32,blmorris/micropython,neilh10/micropython,noahwilliamsson/micropython,selste/micropython,alex-robbins/micropython,AriZuu/micropython,supergis/micropython,lowRISC/micropython,turbinenreiter/micropython,galenhz/micropython,dhylands/micropython,noahwilliamsson/micropython,selste/micropython,HenrikSolver/micropython,alex-march/micropython,misterdanb/micropython,chrisdearman/micropython,supergis/micropython,lowRISC/micropython,chrisdearman/micropython,galenhz/micropython,tralamazza/micropython,pozetroninc/micropython,micropython/micropython-esp32,mhoffma/micropython,pramasoul/micropython,blmorris/micropython,drrk/micropython,alex-march/micropython,oopy/micropython,ernesto-g/micropython,toolmacher/micropython,vitiral/micropython,EcmaXp/micropython,noahwilliamsson/micropython,ruffy91/micropython,selste/micropython,misterdanb/micropython,omtinez/micropython,mhoffma/micropython,alex-robbins/micropython,matthewelse/micropython,redbear/micropython,ChuckM/micropython,kerneltask/micropython,vitiral/micropython,toolmacher/micropython,ChuckM/micropython,Timmenem/micropython,bvernoux/micropython,martinribelotta/micropython,adafruit/micropython,ruffy91/micropython,tobbad/micropython,blmorris/micropython,redbear/micropython,PappaPeppar/micropython,noahwilliamsson/micropython,matthewelse/micropython,Peetz0r/micropython-esp32,bvernoux/micropython,kerneltask/micropython,Timmenem/micropython,cloudformdesign/micropython,tralamazza/micropython,hiway/micropython,PappaPeppar/micropython,kerneltask/micropython,misterdanb/micropython,blazewicz/micropython,redbear/micropython,adafruit/circuitpython,PappaPeppar/micropython,infinnovation/micropython,martinribelotta/micropython,selste/micropython,chrisdearman/micropython,ruffy91/micropython,dhylands/micropython,dhylands/micropython,ganshun666/micropython,ernesto-g/micropython,drrk/micropython,micropython/micropython-esp32,supergis/micropython,MrSurly/micropython-esp32,ryannathans/micropython,selste/micropython,henriknelson/micropython,emfcamp/micropython,turbinenreiter/micropython,misterdanb/micropython,hosaka/micropython,praemdonck/micropython,danicampora/micropython,adamkh/micropython,redbear/micropython,swegener/micropython,tuc-osg/micropython,tuc-osg/micropython,toolmacher/micropython,ernesto-g/micropython,hiway/micropython,adafruit/circuitpython,hosaka/micropython,kerneltask/micropython,AriZuu/micropython,MrSurly/micropython,adafruit/circuitpython,matthewelse/micropython,martinribelotta/micropython,ganshun666/micropython,praemdonck/micropython,mhoffma/micropython,henriknelson/micropython,xhat/micropython,torwag/micropython,mpalomer/micropython,henriknelson/micropython,deshipu/micropython,cwyark/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,Peetz0r/micropython-esp32,matthewelse/micropython,dxxb/micropython,noahwilliamsson/micropython,MrSurly/micropython,cloudformdesign/micropython,dinau/micropython,MrSurly/micropython-esp32,TDAbboud/micropython,adafruit/micropython,mianos/micropython,adafruit/circuitpython,danicampora/micropython,deshipu/micropython,TDAbboud/micropython,bvernoux/micropython,hiway/micropython,alex-march/micropython,vitiral/micropython,tralamazza/micropython,tobbad/micropython,pfalcon/micropython,pozetroninc/micropython,ruffy91/micropython,PappaPeppar/micropython,dmazzella/micropython,swegener/micropython,Peetz0r/micropython-esp32,MrSurly/micropython-esp32,bvernoux/micropython,blazewicz/micropython,toolmacher/micropython,oopy/micropython,TDAbboud/micropython,galenhz/micropython,turbinenreiter/micropython,blazewicz/micropython,torwag/micropython,adamkh/micropython,infinnovation/micropython,ChuckM/micropython,oopy/micropython,tralamazza/micropython,HenrikSolver/micropython,emfcamp/micropython,Timmenem/micropython,EcmaXp/micropython,supergis/micropython,trezor/micropython,pfalcon/micropython,pozetroninc/micropython,ruffy91/micropython,PappaPeppar/micropython,dmazzella/micropython,swegener/micropython,deshipu/micropython,oopy/micropython,adafruit/micropython,chrisdearman/micropython,tobbad/micropython,ganshun666/micropython,vitiral/micropython,mpalomer/micropython,adamkh/micropython,ruffy91/micropython,adamkh/micropython,hosaka/micropython,dxxb/micropython,pfalcon/micropython,ryannathans/micropython,MrSurly/micropython-esp32,hosaka/micropython,ernesto-g/micropython,galenhz/micropython,micropython/micropython-esp32,pramasoul/micropython,mpalomer/micropython,vitiral/micropython,alex-robbins/micropython,adamkh/micropython,adafruit/circuitpython,EcmaXp/micropython,ChuckM/micropython,emfcamp/micropython,MrSurly/micropython,matthewelse/micropython,SHA2017-badge/micropython-esp32,Peetz0r/micropython-esp32,hosaka/micropython,blmorris/micropython,torwag/micropython,dxxb/micropython,Timmenem/micropython,mianos/micropython,drrk/micropython,omtinez/micropython,HenrikSolver/micropython,blazewicz/micropython,omtinez/micropython,xhat/micropython,HenrikSolver/micropython,cloudformdesign/micropython,infinnovation/micropython,hiway/micropython,trezor/micropython,henriknelson/micropython,danicampora/micropython,swegener/micropython,drrk/micropython,TDAbboud/micropython,swegener/micropython,tuc-osg/micropython,SHA2017-badge/micropython-esp32,alex-robbins/micropython,pramasoul/micropython,dhylands/micropython,dxxb/micropython,praemdonck/micropython,neilh10/micropython,omtinez/micropython,xhat/micropython,cwyark/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,neilh10/micropython,torwag/micropython,jmarcelino/pycom-micropython,ganshun666/micropython,infinnovation/micropython,redbear/micropython,dxxb/micropython,dmazzella/micropython,Peetz0r/micropython-esp32,oopy/micropython,pozetroninc/micropython,ernesto-g/micropython,mianos/micropython,tuc-osg/micropython,supergis/micropython,mianos/micropython,EcmaXp/micropython,pramasoul/micropython,galenhz/micropython,pozetroninc/micropython,toolmacher/micropython,deshipu/micropython,alex-march/micropython,pramasoul/micropython,cloudformdesign/micropython,cwyark/micropython,AriZuu/micropython,martinribelotta/micropython,omtinez/micropython,ryannathans/micropython,adafruit/circuitpython,Timmenem/micropython,danicampora/micropython,ryannathans/micropython,infinnovation/micropython,hiway/micropython,dinau/micropython,neilh10/micropython,pfalcon/micropython,emfcamp/micropython,pozetroninc/micropython,praemdonck/micropython,ganshun666/micropython
|
examples: Add example of I2C usage, taking PyBoard accelerometer as subject.
|
# This is an example on how to access accelerometer on
# PyBoard directly using I2C bus. As such, it's more
# intended to be an I2C example, rather than accelerometer
# example. For the latter, using pyb.Accel class is
# much easier.
import pyb
import time
# Accelerometer needs to be powered on first. Even
# though signal is called "AVDD", and there's separate
# "DVDD", without AVDD, it won't event talk on I2C bus.
accel_pwr = pyb.Pin("MMA_AVDD")
accel_pwr.value(1)
i2c = pyb.I2C(1)
addrs = i2c.scan()
print("Scanning devices:", [hex(x) for x in addrs])
if 0x4c not in addrs:
print("Accelerometer is not detected")
ACCEL_ADDR = 0x4c
ACCEL_AXIS_X_REG = 0
ACCEL_MODE_REG = 7
# Now activate measurements
i2c.mem_write(b"\x01", ACCEL_ADDR, ACCEL_MODE_REG)
print("Try to move accelerometer and watch the values")
while True:
val = i2c.mem_read(1, ACCEL_ADDR, ACCEL_AXIS_X_REG)
print(val[0])
time.sleep(1)
|
<commit_before><commit_msg>examples: Add example of I2C usage, taking PyBoard accelerometer as subject.<commit_after>
|
# This is an example on how to access accelerometer on
# PyBoard directly using I2C bus. As such, it's more
# intended to be an I2C example, rather than accelerometer
# example. For the latter, using pyb.Accel class is
# much easier.
import pyb
import time
# Accelerometer needs to be powered on first. Even
# though signal is called "AVDD", and there's separate
# "DVDD", without AVDD, it won't event talk on I2C bus.
accel_pwr = pyb.Pin("MMA_AVDD")
accel_pwr.value(1)
i2c = pyb.I2C(1)
addrs = i2c.scan()
print("Scanning devices:", [hex(x) for x in addrs])
if 0x4c not in addrs:
print("Accelerometer is not detected")
ACCEL_ADDR = 0x4c
ACCEL_AXIS_X_REG = 0
ACCEL_MODE_REG = 7
# Now activate measurements
i2c.mem_write(b"\x01", ACCEL_ADDR, ACCEL_MODE_REG)
print("Try to move accelerometer and watch the values")
while True:
val = i2c.mem_read(1, ACCEL_ADDR, ACCEL_AXIS_X_REG)
print(val[0])
time.sleep(1)
|
examples: Add example of I2C usage, taking PyBoard accelerometer as subject.# This is an example on how to access accelerometer on
# PyBoard directly using I2C bus. As such, it's more
# intended to be an I2C example, rather than accelerometer
# example. For the latter, using pyb.Accel class is
# much easier.
import pyb
import time
# Accelerometer needs to be powered on first. Even
# though signal is called "AVDD", and there's separate
# "DVDD", without AVDD, it won't event talk on I2C bus.
accel_pwr = pyb.Pin("MMA_AVDD")
accel_pwr.value(1)
i2c = pyb.I2C(1)
addrs = i2c.scan()
print("Scanning devices:", [hex(x) for x in addrs])
if 0x4c not in addrs:
print("Accelerometer is not detected")
ACCEL_ADDR = 0x4c
ACCEL_AXIS_X_REG = 0
ACCEL_MODE_REG = 7
# Now activate measurements
i2c.mem_write(b"\x01", ACCEL_ADDR, ACCEL_MODE_REG)
print("Try to move accelerometer and watch the values")
while True:
val = i2c.mem_read(1, ACCEL_ADDR, ACCEL_AXIS_X_REG)
print(val[0])
time.sleep(1)
|
<commit_before><commit_msg>examples: Add example of I2C usage, taking PyBoard accelerometer as subject.<commit_after># This is an example on how to access accelerometer on
# PyBoard directly using I2C bus. As such, it's more
# intended to be an I2C example, rather than accelerometer
# example. For the latter, using pyb.Accel class is
# much easier.
import pyb
import time
# Accelerometer needs to be powered on first. Even
# though signal is called "AVDD", and there's separate
# "DVDD", without AVDD, it won't event talk on I2C bus.
accel_pwr = pyb.Pin("MMA_AVDD")
accel_pwr.value(1)
i2c = pyb.I2C(1)
addrs = i2c.scan()
print("Scanning devices:", [hex(x) for x in addrs])
if 0x4c not in addrs:
print("Accelerometer is not detected")
ACCEL_ADDR = 0x4c
ACCEL_AXIS_X_REG = 0
ACCEL_MODE_REG = 7
# Now activate measurements
i2c.mem_write(b"\x01", ACCEL_ADDR, ACCEL_MODE_REG)
print("Try to move accelerometer and watch the values")
while True:
val = i2c.mem_read(1, ACCEL_ADDR, ACCEL_AXIS_X_REG)
print(val[0])
time.sleep(1)
|
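The record above polls only the X axis. If the part is an MMA7660-style accelerometer — which register 0 for X and register 7 for mode suggest, though that is an assumption — X, Y and Z are consecutive 6-bit two's-complement registers, so all three axes can be read in one transfer and sign-extended (reusing the i2c object set up in the example):

ACCEL_ADDR = 0x4c
ACCEL_AXIS_X_REG = 0

def _sign_extend_6bit(v):
    # 6-bit two's complement in the low bits of the register
    v &= 0x3f
    return v - 64 if v & 0x20 else v

raw = i2c.mem_read(3, ACCEL_ADDR, ACCEL_AXIS_X_REG)  # X, Y, Z in one read
x, y, z = (_sign_extend_6bit(b) for b in raw)
print(x, y, z)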
|
045751341cb75819907f21eb6ce75791482b6875
|
tests/test_mutate.py
|
tests/test_mutate.py
|
import unittest
from ooni.kit import daphn3
class TestDaphn3(unittest.TestCase):
def test_mutate_string(self):
original_string = '\x00\x00\x00'
mutated = daphn3.daphn3MutateString(original_string, 1)
self.assertEqual(mutated, '\x00\x01\x00')
def test_mutate_daphn3(self):
original_dict = [{'client': '\x00\x00\x00'},
{'server': '\x00\x00\x00'}]
mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
{'server': '\x00\x01\x00'}])
|
Add unittest for daphn3 mutator.
|
Add unittest for daphn3 mutator.
|
Python
|
bsd-2-clause
|
Karthikeyan-kkk/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe
|
Add unittest for daphn3 mutator.
|
import unittest
from ooni.kit import daphn3
class TestDaphn3(unittest.TestCase):
def test_mutate_string(self):
original_string = '\x00\x00\x00'
mutated = daphn3.daphn3MutateString(original_string, 1)
self.assertEqual(mutated, '\x00\x01\x00')
def test_mutate_daphn3(self):
original_dict = [{'client': '\x00\x00\x00'},
{'server': '\x00\x00\x00'}]
mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
{'server': '\x00\x01\x00'}])
|
<commit_before><commit_msg>Add unittest for daphn3 mutator.<commit_after>
|
import unittest
from ooni.kit import daphn3
class TestDaphn3(unittest.TestCase):
def test_mutate_string(self):
original_string = '\x00\x00\x00'
mutated = daphn3.daphn3MutateString(original_string, 1)
self.assertEqual(mutated, '\x00\x01\x00')
def test_mutate_daphn3(self):
original_dict = [{'client': '\x00\x00\x00'},
{'server': '\x00\x00\x00'}]
mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
{'server': '\x00\x01\x00'}])
|
Add unittest for daphn3 mutator.import unittest
from ooni.kit import daphn3
class TestDaphn3(unittest.TestCase):
def test_mutate_string(self):
original_string = '\x00\x00\x00'
mutated = daphn3.daphn3MutateString(original_string, 1)
self.assertEqual(mutated, '\x00\x01\x00')
def test_mutate_daphn3(self):
original_dict = [{'client': '\x00\x00\x00'},
{'server': '\x00\x00\x00'}]
mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
{'server': '\x00\x01\x00'}])
|
<commit_before><commit_msg>Add unittest for daphn3 mutator.<commit_after>import unittest
from ooni.kit import daphn3
class TestDaphn3(unittest.TestCase):
def test_mutate_string(self):
original_string = '\x00\x00\x00'
mutated = daphn3.daphn3MutateString(original_string, 1)
self.assertEqual(mutated, '\x00\x01\x00')
def test_mutate_daphn3(self):
original_dict = [{'client': '\x00\x00\x00'},
{'server': '\x00\x00\x00'}]
mutated_dict = daphn3.daphn3Mutate(original_dict, 1, 1)
self.assertEqual(mutated_dict, [{'client': '\x00\x00\x00'},
{'server': '\x00\x01\x00'}])
|
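From the test vectors alone, daphn3MutateString appears to bump a single byte at the given position ('\x00' at index 1 becomes '\x01'), and daphn3Mutate applies that to one step of the client/server dialogue. A toy reimplementation consistent with just these two assertions — the real ooni.kit.daphn3 code may well differ:

def toy_mutate_string(s, position):
    # Increment one byte, matching '\x00\x00\x00' -> '\x00\x01\x00'
    bumped = (ord(s[position]) + 1) % 256
    return s[:position] + chr(bumped) + s[position + 1:]

def toy_mutate(steps, step_idx, position):
    # Mutate the payload of a single step in the dialogue
    out = [dict(step) for step in steps]
    key = next(iter(out[step_idx]))
    out[step_idx][key] = toy_mutate_string(out[step_idx][key], position)
    return out

assert toy_mutate_string('\x00\x00\x00', 1) == '\x00\x01\x00'
assert toy_mutate([{'client': '\x00\x00\x00'}, {'server': '\x00\x00\x00'}],
                  1, 1) == [{'client': '\x00\x00\x00'},
                            {'server': '\x00\x01\x00'}]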
|
c2a9c270643c0f4f0dc84979681b45f3721d5cae
|
test/functional/rpc_part_signmessage.py
|
test/functional/rpc_part_signmessage.py
|
#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for signing and verifying messages."""
from test_framework.test_particl import ParticlTestFramework
from test_framework.util import assert_equal
class SignMessagesTest(ParticlTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [[]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
self.start_nodes()
def run_test(self):
self.nodes[0].extkeyimportmaster('abandon baby cabbage dad eager fabric gadget habit ice kangaroo lab absorb')
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = '7shnesmjFcQZoxXCsNV55v7hrbQMtBfMNscuBkYrLa1mcJNPbXhU'
address = 'pX9N6S76ZtA5BfsiJmqBbjaEgLMHpt58it'
expected_signature = 'H/ededxXrX9m9uygWRZyfdpEKiKbsHpXZtdWqM1BP+AfDZVV1y0YRcOsGmyKEmDoD7R8Tqa2ptk3XAm71ELGZLo='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with a 256bit address with wallet')
address = self.nodes[0].getnewaddress('', False, False, True)
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert(not self.nodes[0].verifymessage(other_address, signature, message))
assert(not self.nodes[0].verifymessage(address, other_signature, message))
if __name__ == '__main__':
SignMessagesTest().main()
|
Add test for 256bit address signing and verification
|
Add test for 256bit address signing and verification
|
Python
|
mit
|
tecnovert/particl-core,tecnovert/particl-core,tecnovert/particl-core,particl/particl-core,particl/particl-core,particl/particl-core,particl/particl-core,particl/particl-core,tecnovert/particl-core,tecnovert/particl-core,tecnovert/particl-core,particl/particl-core
|
Add test for 256bit address signing and verification
|
#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for signing and verifying messages."""
from test_framework.test_particl import ParticlTestFramework
from test_framework.util import assert_equal
class SignMessagesTest(ParticlTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [[]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
self.start_nodes()
def run_test(self):
self.nodes[0].extkeyimportmaster('abandon baby cabbage dad eager fabric gadget habit ice kangaroo lab absorb')
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = '7shnesmjFcQZoxXCsNV55v7hrbQMtBfMNscuBkYrLa1mcJNPbXhU'
address = 'pX9N6S76ZtA5BfsiJmqBbjaEgLMHpt58it'
expected_signature = 'H/ededxXrX9m9uygWRZyfdpEKiKbsHpXZtdWqM1BP+AfDZVV1y0YRcOsGmyKEmDoD7R8Tqa2ptk3XAm71ELGZLo='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with a 256bit address with wallet')
address = self.nodes[0].getnewaddress('', False, False, True)
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert(not self.nodes[0].verifymessage(other_address, signature, message))
assert(not self.nodes[0].verifymessage(address, other_signature, message))
if __name__ == '__main__':
SignMessagesTest().main()
|
<commit_before><commit_msg>Add test for 256bit address signing and verification<commit_after>
|
#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for signing and verifying messages."""
from test_framework.test_particl import ParticlTestFramework
from test_framework.util import assert_equal
class SignMessagesTest(ParticlTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [[]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
self.start_nodes()
def run_test(self):
self.nodes[0].extkeyimportmaster('abandon baby cabbage dad eager fabric gadget habit ice kangaroo lab absorb')
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = '7shnesmjFcQZoxXCsNV55v7hrbQMtBfMNscuBkYrLa1mcJNPbXhU'
address = 'pX9N6S76ZtA5BfsiJmqBbjaEgLMHpt58it'
expected_signature = 'H/ededxXrX9m9uygWRZyfdpEKiKbsHpXZtdWqM1BP+AfDZVV1y0YRcOsGmyKEmDoD7R8Tqa2ptk3XAm71ELGZLo='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with a 256bit address with wallet')
address = self.nodes[0].getnewaddress('', False, False, True)
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert(not self.nodes[0].verifymessage(other_address, signature, message))
assert(not self.nodes[0].verifymessage(address, other_signature, message))
if __name__ == '__main__':
SignMessagesTest().main()
|
Add test for 256bit address signing and verification#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for signing and verifying messages."""
from test_framework.test_particl import ParticlTestFramework
from test_framework.util import assert_equal
class SignMessagesTest(ParticlTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [[]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
self.start_nodes()
def run_test(self):
self.nodes[0].extkeyimportmaster('abandon baby cabbage dad eager fabric gadget habit ice kangaroo lab absorb')
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = '7shnesmjFcQZoxXCsNV55v7hrbQMtBfMNscuBkYrLa1mcJNPbXhU'
address = 'pX9N6S76ZtA5BfsiJmqBbjaEgLMHpt58it'
expected_signature = 'H/ededxXrX9m9uygWRZyfdpEKiKbsHpXZtdWqM1BP+AfDZVV1y0YRcOsGmyKEmDoD7R8Tqa2ptk3XAm71ELGZLo='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with a 256bit address with wallet')
address = self.nodes[0].getnewaddress('', False, False, True)
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert(not self.nodes[0].verifymessage(other_address, signature, message))
assert(not self.nodes[0].verifymessage(address, other_signature, message))
if __name__ == '__main__':
SignMessagesTest().main()
|
<commit_before><commit_msg>Add test for 256bit address signing and verification<commit_after>#!/usr/bin/env python3
# Copyright (c) 2016-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for signing and verifying messages."""
from test_framework.test_particl import ParticlTestFramework
from test_framework.util import assert_equal
class SignMessagesTest(ParticlTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
self.extra_args = [[]]
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def setup_network(self, split=False):
self.add_nodes(self.num_nodes, extra_args=self.extra_args)
self.start_nodes()
def run_test(self):
self.nodes[0].extkeyimportmaster('abandon baby cabbage dad eager fabric gadget habit ice kangaroo lab absorb')
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = '7shnesmjFcQZoxXCsNV55v7hrbQMtBfMNscuBkYrLa1mcJNPbXhU'
address = 'pX9N6S76ZtA5BfsiJmqBbjaEgLMHpt58it'
expected_signature = 'H/ededxXrX9m9uygWRZyfdpEKiKbsHpXZtdWqM1BP+AfDZVV1y0YRcOsGmyKEmDoD7R8Tqa2ptk3XAm71ELGZLo='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with a 256bit address with wallet')
address = self.nodes[0].getnewaddress('', False, False, True)
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert(not self.nodes[0].verifymessage(other_address, signature, message))
assert(not self.nodes[0].verifymessage(address, other_signature, message))
if __name__ == '__main__':
SignMessagesTest().main()
|
|
0a914b87f143782d5c927740acc9de251ec7a2fc
|
specter/tests/__main__.py
|
specter/tests/__main__.py
|
import unittest
# Import test modules.
from .test_events import *
from .test_forms import *
from .test_frames import *
from .test_navigation import *
from .test_open import *
from .test_qtmessage import *
from .test_redirection import *
from .test_registry import *
from .test_selectors import *
from .test_signals import *
from .test_simple import *
from .test_ssl import *
from .test_util import *
if __name__ == "__main__":
unittest.main()
|
Make tests trivially runnable without py.test
|
Make tests trivially runnable without py.test
|
Python
|
mit
|
andrew-d/Specter.py,andrew-d/Specter.py,andrew-d/Specter.py
|
Make tests trivially runnable without py.test
|
import unittest
# Import test modules.
from .test_events import *
from .test_forms import *
from .test_frames import *
from .test_navigation import *
from .test_open import *
from .test_qtmessage import *
from .test_redirection import *
from .test_registry import *
from .test_selectors import *
from .test_signals import *
from .test_simple import *
from .test_ssl import *
from .test_util import *
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Make tests trivially runnable without py.test<commit_after>
|
import unittest
# Import test modules.
from .test_events import *
from .test_forms import *
from .test_frames import *
from .test_navigation import *
from .test_open import *
from .test_qtmessage import *
from .test_redirection import *
from .test_registry import *
from .test_selectors import *
from .test_signals import *
from .test_simple import *
from .test_ssl import *
from .test_util import *
if __name__ == "__main__":
unittest.main()
|
Make tests trivially runnable without py.testimport unittest
# Import test modules.
from .test_events import *
from .test_forms import *
from .test_frames import *
from .test_navigation import *
from .test_open import *
from .test_qtmessage import *
from .test_redirection import *
from .test_registry import *
from .test_selectors import *
from .test_signals import *
from .test_simple import *
from .test_ssl import *
from .test_util import *
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Make tests trivially runnable without py.test<commit_after>import unittest
# Import test modules.
from .test_events import *
from .test_forms import *
from .test_frames import *
from .test_navigation import *
from .test_open import *
from .test_qtmessage import *
from .test_redirection import *
from .test_registry import *
from .test_selectors import *
from .test_signals import *
from .test_simple import *
from .test_ssl import *
from .test_util import *
if __name__ == "__main__":
unittest.main()
|
|
3e75a66e7f90e04a7d2c53bc619ecebd706f7ca2
|
st2common/tests/unit/test_db_pack.py
|
st2common/tests/unit/test_db_pack.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the 'License'); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import PackDB
from st2common.persistence.pack import Pack
from st2tests import DbTestCase
from tests.unit.base import BaseDBModelCRUDTestCase
class PackDBModelCRUDTestCase(BaseDBModelCRUDTestCase, DbTestCase):
model_class = PackDB
persistance_class = Pack
model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen',
'path': '/opt/stackstorm/packs/yolo_ci/'
}
update_attribute_name = 'author'
def test_path_none(self):
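        # Re-run the CRUD operations with the optional 'path' attribute omitted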
PackDBModelCRUDTestCase.model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen'
}
super(PackDBModelCRUDTestCase, self).test_crud_operations()
|
Add unit tests for pack db models
|
Add unit tests for pack db models
|
Python
|
apache-2.0
|
Plexxi/st2,nzlosh/st2,Plexxi/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2
|
Add unit tests for pack db models
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the 'License'); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import PackDB
from st2common.persistence.pack import Pack
from st2tests import DbTestCase
from tests.unit.base import BaseDBModelCRUDTestCase
class PackDBModelCRUDTestCase(BaseDBModelCRUDTestCase, DbTestCase):
model_class = PackDB
persistance_class = Pack
model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen',
'path': '/opt/stackstorm/packs/yolo_ci/'
}
update_attribute_name = 'author'
def test_path_none(self):
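        # Re-run the CRUD operations with the optional 'path' attribute omitted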
PackDBModelCRUDTestCase.model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen'
}
super(PackDBModelCRUDTestCase, self).test_crud_operations()
|
<commit_before><commit_msg>Add unit tests for pack db models<commit_after>
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the 'License'); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import PackDB
from st2common.persistence.pack import Pack
from st2tests import DbTestCase
from tests.unit.base import BaseDBModelCRUDTestCase
class PackDBModelCRUDTestCase(BaseDBModelCRUDTestCase, DbTestCase):
model_class = PackDB
persistance_class = Pack
model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen',
'path': '/opt/stackstorm/packs/yolo_ci/'
}
update_attribute_name = 'author'
def test_path_none(self):
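        # Re-run the CRUD operations with the optional 'path' attribute omitted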
PackDBModelCRUDTestCase.model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen'
}
super(PackDBModelCRUDTestCase, self).test_crud_operations()
|
Add unit tests for pack db models# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the 'License'); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import PackDB
from st2common.persistence.pack import Pack
from st2tests import DbTestCase
from tests.unit.base import BaseDBModelCRUDTestCase
class PackDBModelCRUDTestCase(BaseDBModelCRUDTestCase, DbTestCase):
model_class = PackDB
persistance_class = Pack
model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen',
'path': '/opt/stackstorm/packs/yolo_ci/'
}
update_attribute_name = 'author'
def test_path_none(self):
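        # Re-run the CRUD operations with the optional 'path' attribute omitted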
PackDBModelCRUDTestCase.model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen'
}
super(PackDBModelCRUDTestCase, self).test_crud_operations()
|
<commit_before><commit_msg>Add unit tests for pack db models<commit_after># Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the 'License'); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.models.db.pack import PackDB
from st2common.persistence.pack import Pack
from st2tests import DbTestCase
from tests.unit.base import BaseDBModelCRUDTestCase
class PackDBModelCRUDTestCase(BaseDBModelCRUDTestCase, DbTestCase):
model_class = PackDB
persistance_class = Pack
model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen',
'path': '/opt/stackstorm/packs/yolo_ci/'
}
update_attribute_name = 'author'
def test_path_none(self):
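        # Re-run the CRUD operations with the optional 'path' attribute omitted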
PackDBModelCRUDTestCase.model_class_kwargs = {
'name': 'Yolo CI',
'ref': 'yolo_ci',
'description': 'YOLO CI pack',
'version': '0.1.0',
'author': 'Volkswagen'
}
super(PackDBModelCRUDTestCase, self).test_crud_operations()
|
|
c3ae360e506e9cb411db830d7faf1bdb0288c0a4
|
arim/models.py
|
arim/models.py
|
from django.db import models
class Autoreg(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
|
Add model for 'autoreg' table
|
Add model for 'autoreg' table
|
Python
|
bsd-3-clause
|
drkitty/arim,drkitty/arim,OSU-Net/arim,drkitty/arim,OSU-Net/arim,OSU-Net/arim
|
Add model for 'autoreg' table
|
from django.db import models
class Autoreg(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
|
<commit_before><commit_msg>Add model for 'autoreg' table<commit_after>
|
from django.db import models
class Autoreg(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
|
Add model for 'autoreg' tablefrom django.db import models
class Autoreg(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
|
<commit_before><commit_msg>Add model for 'autoreg' table<commit_after>from django.db import models
class Autoreg(models.Model):
class Meta:
db_table = 'autoreg'
mac = models.CharField(max_length=17, db_index=True)
ip = models.IntegerField(primary_key=True)
date = models.IntegerField()
|
|
a3d13b11d148738c6203c986461832d9d154577c
|
scripts/tne_campaign_ranking.py
|
scripts/tne_campaign_ranking.py
|
from datetime import date
import xlsxwriter
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.funding.models import Funding, Donor
from bluebottle.geo.models import Location
OFFICE_NAME = 'Segou'
TARGET = 328000
DEADLINE = date(2022, 8, 13)
def run(*args):
tne = Client.objects.get(client_name='nexteconomy')
with LocalTenant(tne, clear_tenant=True):
result = []
location = Location.objects.get(name=OFFICE_NAME)
campaigns = Funding.objects.filter(
initiative__location__name=OFFICE_NAME,
deadline__date=DEADLINE,
status__in=('succeeded', 'partially_funded')
)
for campaign in campaigns:
donors = campaign.contributors.instance_of(
Donor
).filter(
status='succeeded'
).order_by(
'created'
)
total = 0
for donor in donors:
total += donor.amount.amount
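                # record the created timestamp of the first donation that pushes the running total past TARGET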
if total > TARGET:
result.append({
'id': campaign.id,
'title': campaign.title,
'status': campaign.status,
'target reached': str(donor.created),
})
break
workbook = xlsxwriter.Workbook(f'TNE-{location.name}-{DEADLINE}.xlsx', {'remove_timezone': True})
worksheet = workbook.add_worksheet()
worksheet.write_row(0, 0, result[0].keys())
for (index, row) in enumerate(result):
worksheet.write_row(index + 1, 0, row.values())
workbook.close()
|
Add script to rank tne activities on when they reached a specific target
|
Add script to rank tne activities on when they reached a specific target
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Add script to rank tne activities on when they reached a specific target
|
from datetime import date
import xlsxwriter
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.funding.models import Funding, Donor
from bluebottle.geo.models import Location
OFFICE_NAME = 'Segou'
TARGET = 328000
DEADLINE = date(2022, 8, 13)
def run(*args):
tne = Client.objects.get(client_name='nexteconomy')
with LocalTenant(tne, clear_tenant=True):
result = []
location = Location.objects.get(name=OFFICE_NAME)
campaigns = Funding.objects.filter(
initiative__location__name=OFFICE_NAME,
deadline__date=DEADLINE,
status__in=('succeeded', 'partially_funded')
)
for campaign in campaigns:
donors = campaign.contributors.instance_of(
Donor
).filter(
status='succeeded'
).order_by(
'created'
)
total = 0
for donor in donors:
total += donor.amount.amount
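                # record the created timestamp of the first donation that pushes the running total past TARGET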
if total > TARGET:
result.append({
'id': campaign.id,
'title': campaign.title,
'status': campaign.status,
'target reached': str(donor.created),
})
break
workbook = xlsxwriter.Workbook(f'TNE-{location.name}-{DEADLINE}.xlsx', {'remove_timezone': True})
worksheet = workbook.add_worksheet()
worksheet.write_row(0, 0, result[0].keys())
for (index, row) in enumerate(result):
worksheet.write_row(index + 1, 0, row.values())
workbook.close()
|
<commit_before><commit_msg>Add script to rank tne activities on when they reached a specific target<commit_after>
|
from datetime import date
import xlsxwriter
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.funding.models import Funding, Donor
from bluebottle.geo.models import Location
OFFICE_NAME = 'Segou'
TARGET = 328000
DEADLINE = date(2022, 8, 13)
def run(*args):
tne = Client.objects.get(client_name='nexteconomy')
with LocalTenant(tne, clear_tenant=True):
result = []
location = Location.objects.get(name=OFFICE_NAME)
campaigns = Funding.objects.filter(
initiative__location__name=OFFICE_NAME,
deadline__date=DEADLINE,
status__in=('succeeded', 'partially_funded')
)
for campaign in campaigns:
donors = campaign.contributors.instance_of(
Donor
).filter(
status='succeeded'
).order_by(
'created'
)
total = 0
for donor in donors:
total += donor.amount.amount
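                # record the created timestamp of the first donation that pushes the running total past TARGET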
if total > TARGET:
result.append({
'id': campaign.id,
'title': campaign.title,
'status': campaign.status,
'target reached': str(donor.created),
})
break
workbook = xlsxwriter.Workbook(f'TNE-{location.name}-{DEADLINE}.xlsx', {'remove_timezone': True})
worksheet = workbook.add_worksheet()
worksheet.write_row(0, 0, result[0].keys())
for (index, row) in enumerate(result):
worksheet.write_row(index + 1, 0, row.values())
workbook.close()
|
Add script to rank tne activities on when they reached a specific targetfrom datetime import date
import xlsxwriter
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.funding.models import Funding, Donor
from bluebottle.geo.models import Location
OFFICE_NAME = 'Segou'
TARGET = 328000
DEADLINE = date(2022, 8, 13)
def run(*args):
tne = Client.objects.get(client_name='nexteconomy')
with LocalTenant(tne, clear_tenant=True):
result = []
location = Location.objects.get(name=OFFICE_NAME)
campaigns = Funding.objects.filter(
initiative__location__name=OFFICE_NAME,
deadline__date=DEADLINE,
status__in=('succeeded', 'partially_funded')
)
for campaign in campaigns:
donors = campaign.contributors.instance_of(
Donor
).filter(
status='succeeded'
).order_by(
'created'
)
total = 0
for donor in donors:
total += donor.amount.amount
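                # record the created timestamp of the first donation that pushes the running total past TARGET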
if total > TARGET:
result.append({
'id': campaign.id,
'title': campaign.title,
'status': campaign.status,
'target reached': str(donor.created),
})
break
workbook = xlsxwriter.Workbook(f'TNE-{location.name}-{DEADLINE}.xlsx', {'remove_timezone': True})
worksheet = workbook.add_worksheet()
worksheet.write_row(0, 0, result[0].keys())
for (index, row) in enumerate(result):
worksheet.write_row(index + 1, 0, row.values())
workbook.close()
|
<commit_before><commit_msg>Add script to rank tne activities on when they reached a specific target<commit_after>from datetime import date
import xlsxwriter
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant
from bluebottle.funding.models import Funding, Donor
from bluebottle.geo.models import Location
OFFICE_NAME = 'Segou'
TARGET = 328000
DEADLINE = date(2022, 8, 13)
def run(*args):
tne = Client.objects.get(client_name='nexteconomy')
with LocalTenant(tne, clear_tenant=True):
result = []
location = Location.objects.get(name=OFFICE_NAME)
campaigns = Funding.objects.filter(
initiative__location__name=OFFICE_NAME,
deadline__date=DEADLINE,
status__in=('succeeded', 'partially_funded')
)
for campaign in campaigns:
donors = campaign.contributors.instance_of(
Donor
).filter(
status='succeeded'
).order_by(
'created'
)
total = 0
for donor in donors:
total += donor.amount.amount
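                # record the created timestamp of the first donation that pushes the running total past TARGET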
if total > TARGET:
result.append({
'id': campaign.id,
'title': campaign.title,
'status': campaign.status,
'target reached': str(donor.created),
})
break
workbook = xlsxwriter.Workbook(f'TNE-{location.name}-{DEADLINE}.xlsx', {'remove_timezone': True})
worksheet = workbook.add_worksheet()
worksheet.write_row(0, 0, result[0].keys())
for (index, row) in enumerate(result):
worksheet.write_row(index + 1, 0, row.values())
workbook.close()
|
|
87ad66ec3a44cbe1256b05881a7e0e04b1b030ea
|
17B-162/HI/imaging/HI_convolve_to_14B.py
|
17B-162/HI/imaging/HI_convolve_to_14B.py
|
'''
For comparison, smooth and regrid to the 14B data.
'''
import os
from cube_analysis.reprojection import reproject_cube
from paths import (data_path, fourteenB_wGBT_HI_file_dict,
seventeenB_02kms_wGBT_HI_file_dict,
seventeenB_HI_data_02kms_wGBT_path)
out_folder = seventeenB_HI_data_02kms_wGBT_path("14B_match",
no_check=True)
if not os.path.exists(out_folder):
os.mkdir(out_folder)
out_name = seventeenB_02kms_wGBT_HI_file_dict['Cube'].split("/")[-1].rstrip(".fits") + \
".14B_match.fits"
reproject_cube(seventeenB_02kms_wGBT_HI_file_dict['Cube'],
fourteenB_wGBT_HI_file_dict['Cube'],
out_name,
output_folder=out_folder,
save_spectral=False,
is_huge=True,
reproject_type='all',
common_beam=True,
verbose=True,
chunk=40)
|
Make a B+C config HI cube matched to the C-only
|
Make a B+C config HI cube matched to the C-only
|
Python
|
mit
|
e-koch/VLA_Lband,e-koch/VLA_Lband
|
Make a B+C config HI cube matched to the C-only
|
'''
For comparison, smooth and regrid to the 14B data.
'''
import os
from cube_analysis.reprojection import reproject_cube
from paths import (data_path, fourteenB_wGBT_HI_file_dict,
seventeenB_02kms_wGBT_HI_file_dict,
seventeenB_HI_data_02kms_wGBT_path)
out_folder = seventeenB_HI_data_02kms_wGBT_path("14B_match",
no_check=True)
if not os.path.exists(out_folder):
os.mkdir(out_folder)
out_name = seventeenB_02kms_wGBT_HI_file_dict['Cube'].split("/")[-1].rstrip(".fits") + \
".14B_match.fits"
reproject_cube(seventeenB_02kms_wGBT_HI_file_dict['Cube'],
fourteenB_wGBT_HI_file_dict['Cube'],
out_name,
output_folder=out_folder,
save_spectral=False,
is_huge=True,
reproject_type='all',
common_beam=True,
verbose=True,
chunk=40)
|
<commit_before><commit_msg>Make a B+C config HI cube matched to the C-only<commit_after>
|
'''
For comparison, smooth and regrid to the 14B data.
'''
import os
from cube_analysis.reprojection import reproject_cube
from paths import (data_path, fourteenB_wGBT_HI_file_dict,
seventeenB_02kms_wGBT_HI_file_dict,
seventeenB_HI_data_02kms_wGBT_path)
out_folder = seventeenB_HI_data_02kms_wGBT_path("14B_match",
no_check=True)
if not os.path.exists(out_folder):
os.mkdir(out_folder)
out_name = seventeenB_02kms_wGBT_HI_file_dict['Cube'].split("/")[-1].rstrip(".fits") + \
".14B_match.fits"
reproject_cube(seventeenB_02kms_wGBT_HI_file_dict['Cube'],
fourteenB_wGBT_HI_file_dict['Cube'],
out_name,
output_folder=out_folder,
save_spectral=False,
is_huge=True,
reproject_type='all',
common_beam=True,
verbose=True,
chunk=40)
|
Make a B+C config HI cube matched to the C-only
'''
For comparison, smooth and regrid to the 14B data.
'''
import os
from cube_analysis.reprojection import reproject_cube
from paths import (data_path, fourteenB_wGBT_HI_file_dict,
seventeenB_02kms_wGBT_HI_file_dict,
seventeenB_HI_data_02kms_wGBT_path)
out_folder = seventeenB_HI_data_02kms_wGBT_path("14B_match",
no_check=True)
if not os.path.exists(out_folder):
os.mkdir(out_folder)
out_name = seventeenB_02kms_wGBT_HI_file_dict['Cube'].split("/")[-1].rstrip(".fits") + \
".14B_match.fits"
reproject_cube(seventeenB_02kms_wGBT_HI_file_dict['Cube'],
fourteenB_wGBT_HI_file_dict['Cube'],
out_name,
output_folder=out_folder,
save_spectral=False,
is_huge=True,
reproject_type='all',
common_beam=True,
verbose=True,
chunk=40)
|
<commit_before><commit_msg>Make a B+C config HI cube matched to the C-only<commit_after>
'''
For comparison, smooth and regrid to the 14B data.
'''
import os
from cube_analysis.reprojection import reproject_cube
from paths import (data_path, fourteenB_wGBT_HI_file_dict,
seventeenB_02kms_wGBT_HI_file_dict,
seventeenB_HI_data_02kms_wGBT_path)
out_folder = seventeenB_HI_data_02kms_wGBT_path("14B_match",
no_check=True)
if not os.path.exists(out_folder):
os.mkdir(out_folder)
out_name = seventeenB_02kms_wGBT_HI_file_dict['Cube'].split("/")[-1].rstrip(".fits") + \
".14B_match.fits"
reproject_cube(seventeenB_02kms_wGBT_HI_file_dict['Cube'],
fourteenB_wGBT_HI_file_dict['Cube'],
out_name,
output_folder=out_folder,
save_spectral=False,
is_huge=True,
reproject_type='all',
common_beam=True,
verbose=True,
chunk=40)
|
|
ab4d6ee78e6702b80947a88f925857a2134886ee
|
convertv2v1.py
|
convertv2v1.py
|
#!/usr/bin/env python3
import json
import argparse
import os
import sys
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--oldnodes', action='store',
help='v2 nodes file you want to convert',required=True)
parser.add_argument('-new', '--newnodes', action='store',
help='nodes file you want to store',required=True)
args = parser.parse_args()
options = vars(args)
oldnodes_fn = os.path.realpath(options['oldnodes'])
newnodes_fn = os.path.realpath(options['newnodes'])
newnodedb = {'nodes': dict()}
# read nodedb state from node.json
try:
with open(oldnodes_fn, 'r', encoding=('UTF-8')) as oldnodedb_handle:
nodedb = json.load(oldnodedb_handle)
except IOError:
nodedb = {'nodes': dict()}
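# v1 keys each node by its node_id instead of storing a flat list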
for oldnode in nodedb['nodes']:
node_id = oldnode['nodeinfo']['node_id']
newnodedb['nodes'][node_id] = oldnode
newnodedb['timestamp'] = nodedb['timestamp']
newnodedb['version'] = 1
# write processed data to dest dir
with open(newnodes_fn, 'w') as f:
json.dump(newnodedb, f)
|
Add script to convert nodes.json version 2 to version 1
|
Add script to convert nodes.json version 2 to version 1
|
Python
|
bsd-3-clause
|
freifunk-mwu/ffmap-backend,freifunk-mwu/ffmap-backend
|
Add script to convert nodes.json version 2 to version 1
|
#!/usr/bin/env python3
import json
import argparse
import os
import sys
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--oldnodes', action='store',
help='v2 nodes file you want to convert',required=True)
parser.add_argument('-new', '--newnodes', action='store',
help='nodes file you want to store',required=True)
args = parser.parse_args()
options = vars(args)
oldnodes_fn = os.path.realpath(options['oldnodes'])
newnodes_fn = os.path.realpath(options['newnodes'])
newnodedb = {'nodes': dict()}
# read nodedb state from node.json
try:
with open(oldnodes_fn, 'r', encoding=('UTF-8')) as oldnodedb_handle:
nodedb = json.load(oldnodedb_handle)
except IOError:
nodedb = {'nodes': dict()}
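# v1 keys each node by its node_id instead of storing a flat list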
for oldnode in nodedb['nodes']:
node_id = oldnode['nodeinfo']['node_id']
newnodedb['nodes'][node_id] = oldnode
newnodedb['timestamp'] = nodedb['timestamp']
newnodedb['version'] = 1
# write processed data to dest dir
with open(newnodes_fn, 'w') as f:
json.dump(newnodedb, f)
|
<commit_before><commit_msg>Add script to convert nodes.json version 2 to version 1<commit_after>
|
#!/usr/bin/env python3
import json
import argparse
import os
import sys
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--oldnodes', action='store',
help='v2 nodes file you want to convert',required=True)
parser.add_argument('-new', '--newnodes', action='store',
help='nodes file you want to store',required=True)
args = parser.parse_args()
options = vars(args)
oldnodes_fn = os.path.realpath(options['oldnodes'])
newnodes_fn = os.path.realpath(options['newnodes'])
newnodedb = {'nodes': dict()}
# read nodedb state from node.json
try:
with open(oldnodes_fn, 'r', encoding=('UTF-8')) as oldnodedb_handle:
nodedb = json.load(oldnodedb_handle)
except IOError:
nodedb = {'nodes': dict()}
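# v1 keys each node by its node_id instead of storing a flat list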
for oldnode in nodedb['nodes']:
node_id = oldnode['nodeinfo']['node_id']
newnodedb['nodes'][node_id] = oldnode
newnodedb['timestamp'] = nodedb['timestamp']
newnodedb['version'] = 1
# write processed data to dest dir
with open(newnodes_fn, 'w') as f:
json.dump(newnodedb, f)
|
Add script to convert nodes.json version 2 to version 1#!/usr/bin/env python3
import json
import argparse
import os
import sys
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--oldnodes', action='store',
help='v2 nodes file you want to convert',required=True)
parser.add_argument('-new', '--newnodes', action='store',
help='nodes file you want to store',required=True)
args = parser.parse_args()
options = vars(args)
oldnodes_fn = os.path.realpath(options['oldnodes'])
newnodes_fn = os.path.realpath(options['newnodes'])
newnodedb = {'nodes': dict()}
# read nodedb state from node.json
try:
with open(oldnodes_fn, 'r', encoding=('UTF-8')) as oldnodedb_handle:
nodedb = json.load(oldnodedb_handle)
except IOError:
nodedb = {'nodes': dict()}
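# v1 keys each node by its node_id instead of storing a flat list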
for oldnode in nodedb['nodes']:
node_id = oldnode['nodeinfo']['node_id']
newnodedb['nodes'][node_id] = oldnode
newnodedb['timestamp'] = nodedb['timestamp']
newnodedb['version'] = 1
# write processed data to dest dir
with open(newnodes_fn, 'w') as f:
json.dump(newnodedb, f)
|
<commit_before><commit_msg>Add script to convert nodes.json version 2 to version 1<commit_after>#!/usr/bin/env python3
import json
import argparse
import os
import sys
parser = argparse.ArgumentParser()
parser.add_argument('-o', '--oldnodes', action='store',
help='v2 nodes file you want to convert',required=True)
parser.add_argument('-new', '--newnodes', action='store',
help='nodes file you want to store',required=True)
args = parser.parse_args()
options = vars(args)
oldnodes_fn = os.path.realpath(options['oldnodes'])
newnodes_fn = os.path.realpath(options['newnodes'])
newnodedb = {'nodes': dict()}
# read nodedb state from node.json
try:
with open(oldnodes_fn, 'r', encoding=('UTF-8')) as oldnodedb_handle:
nodedb = json.load(oldnodedb_handle)
except IOError:
nodedb = {'nodes': dict()}
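# v1 keys each node by its node_id instead of storing a flat list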
for oldnode in nodedb['nodes']:
node_id = oldnode['nodeinfo']['node_id']
newnodedb['nodes'][node_id] = oldnode
newnodedb['timestamp'] = nodedb['timestamp']
newnodedb['version'] = 1
# write processed data to dest dir
with open(newnodes_fn, 'w') as f:
json.dump(newnodedb, f)
|
|
d5315907c25a42b6275b43b49f6e24ae72308c5b
|
utils/image_to_calc.py
|
utils/image_to_calc.py
|
#!/usr/bin/env python
import sys, os
import serial
from PIL import Image
if not len(sys.argv) == 2:
print sys.argv[0], "/path/to/image"
sys.exit(1)
filepath = sys.argv[1]
im = Image.open(filepath)
rgb_im = im.convert('RGB')
width, height = im.size
if not width == 96 or not height == 64:
print "Image wrong size. Must be 96x64."
sys.exit(1)
bytes = []
for y in range(0, height):
for byteIndex in range(0, width / 8):
byte = 0
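        # pack 8 horizontal pixels into one byte, MSB = leftmost pixel; dark pixels (mean < 128) set the bit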
for bit in range(0, 8):
x = byteIndex * 8 + bit
r, g, b = rgb_im.getpixel((x, y))
v = (r + g + b) / 3.0
if v < 128:
byte |= 0x80 >> bit
bytes += [byte]
ser = serial.Serial("/dev/ttyUSB0", 500000)
for byte in bytes:
x = ser.write(chr(byte))
ser.close()
print "sent", len(bytes), "bytes"
|
Send images to calculator over serial.
|
Send images to calculator over serial.
|
Python
|
mit
|
jmptable/deshellator,jmptable/deshellator,jmptable/deshellator,jmptable/deshellator,jmptable/deshellator
|
Send images to calculator over serial.
|
#!/usr/bin/env python
import sys, os
import serial
from PIL import Image
if not len(sys.argv) == 2:
print sys.argv[0], "/path/to/image"
sys.exit(1)
filepath = sys.argv[1]
im = Image.open(filepath)
rgb_im = im.convert('RGB')
width, height = im.size
if not width == 96 or not height == 64:
print "Image wrong size. Must be 96x64."
sys.exit(1)
bytes = []
for y in range(0, height):
for byteIndex in range(0, width / 8):
byte = 0
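        # pack 8 horizontal pixels into one byte, MSB = leftmost pixel; dark pixels (mean < 128) set the bit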
for bit in range(0, 8):
x = byteIndex * 8 + bit
r, g, b = rgb_im.getpixel((x, y))
v = (r + g + b) / 3.0
if v < 128:
byte |= 0x80 >> bit
bytes += [byte]
ser = serial.Serial("/dev/ttyUSB0", 500000)
for byte in bytes:
x = ser.write(chr(byte))
ser.close()
print "sent", len(bytes), "bytes"
|
<commit_before><commit_msg>Send images to calculator over serial.<commit_after>
|
#!/usr/bin/env python
import sys, os
import serial
from PIL import Image
if not len(sys.argv) == 2:
print sys.argv[0], "/path/to/image"
sys.exit(1)
filepath = sys.argv[1]
im = Image.open(filepath)
rgb_im = im.convert('RGB')
width, height = im.size
if not width == 96 or not height == 64:
print "Image wrong size. Must be 96x64."
sys.exit(1)
bytes = []
for y in range(0, height):
for byteIndex in range(0, width / 8):
byte = 0
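        # pack 8 horizontal pixels into one byte, MSB = leftmost pixel; dark pixels (mean < 128) set the bit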
for bit in range(0, 8):
x = byteIndex * 8 + bit
r, g, b = rgb_im.getpixel((x, y))
v = (r + g + b) / 3.0
if v < 128:
byte |= 0x80 >> bit
bytes += [byte]
ser = serial.Serial("/dev/ttyUSB0", 500000)
for byte in bytes:
x = ser.write(chr(byte))
ser.close()
print "sent", len(bytes), "bytes"
|
Send images to calculator over serial.#!/usr/bin/env python
import sys, os
import serial
from PIL import Image
if not len(sys.argv) == 2:
print sys.argv[0], "/path/to/image"
sys.exit(1)
filepath = sys.argv[1]
im = Image.open(filepath)
rgb_im = im.convert('RGB')
width, height = im.size
if not width == 96 or not height == 64:
print "Image wrong size. Must be 96x64."
sys.exit(1)
bytes = []
for y in range(0, height):
for byteIndex in range(0, width / 8):
byte = 0
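        # pack 8 horizontal pixels into one byte, MSB = leftmost pixel; dark pixels (mean < 128) set the bit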
for bit in range(0, 8):
x = byteIndex * 8 + bit
r, g, b = rgb_im.getpixel((x, y))
v = (r + g + b) / 3.0
if v < 128:
byte |= 0x80 >> bit
bytes += [byte]
ser = serial.Serial("/dev/ttyUSB0", 500000)
for byte in bytes:
x = ser.write(chr(byte))
ser.close()
print "sent", len(bytes), "bytes"
|
<commit_before><commit_msg>Send images to calculator over serial.<commit_after>#!/usr/bin/env python
import sys, os
import serial
from PIL import Image
if not len(sys.argv) == 2:
print sys.argv[0], "/path/to/image"
sys.exit(1)
filepath = sys.argv[1]
im = Image.open(filepath)
rgb_im = im.convert('RGB')
width, height = im.size
if not width == 96 or not height == 64:
print "Image wrong size. Must be 96x64."
sys.exit(1)
bytes = []
for y in range(0, height):
for byteIndex in range(0, width / 8):
byte = 0
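        # pack 8 horizontal pixels into one byte, MSB = leftmost pixel; dark pixels (mean < 128) set the bit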
for bit in range(0, 8):
x = byteIndex * 8 + bit
r, g, b = rgb_im.getpixel((x, y))
v = (r + g + b) / 3.0
if v < 128:
byte |= 0x80 >> bit
bytes += [byte]
ser = serial.Serial("/dev/ttyUSB0", 500000)
for byte in bytes:
x = ser.write(chr(byte))
ser.close()
print "sent", len(bytes), "bytes"
|
|
a37d2784ef0081fc744fe49b096f0667a4eee2d0
|
tests/test_listings.py
|
tests/test_listings.py
|
# -*- coding: utf-8 -*-
import pytest
import iso3166
def test_country_list():
country_list = iso3166.countries
assert len(country_list) > 100
assert all(isinstance(c, iso3166.Country) for c in country_list)
def test_by_name():
table = iso3166.countries_by_name
assert len(table) >= len(iso3166.countries)
assert table["AFGHANISTAN"].name == "Afghanistan"
def test_by_number():
table = iso3166.countries_by_numeric
assert len(table) >= len(iso3166.countries)
assert table["008"].name == "Albania"
def test_by_alpha2():
table = iso3166.countries_by_alpha2
assert len(table) >= len(iso3166.countries)
assert table['AE'].name == "United Arab Emirates"
def test_by_alpha3():
table = iso3166.countries_by_alpha3
assert len(table) >= len(iso3166.countries)
assert table['AFG'].name == 'Afghanistan'
|
Add tests for the fields updated in ee3cd62
|
Add tests for the fields updated in ee3cd62
|
Python
|
mit
|
deactivated/python-iso3166
|
Add tests for the fields updated in ee3cd62
|
# -*- coding: utf-8 -*-
import pytest
import iso3166
def test_country_list():
country_list = iso3166.countries
assert len(country_list) > 100
assert all(isinstance(c, iso3166.Country) for c in country_list)
def test_by_name():
table = iso3166.countries_by_name
assert len(table) >= len(iso3166.countries)
assert table["AFGHANISTAN"].name == "Afghanistan"
def test_by_number():
table = iso3166.countries_by_numeric
assert len(table) >= len(iso3166.countries)
assert table["008"].name == "Albania"
def test_by_alpha2():
table = iso3166.countries_by_alpha2
assert len(table) >= len(iso3166.countries)
assert table['AE'].name == "United Arab Emirates"
def test_by_alpha3():
table = iso3166.countries_by_alpha3
assert len(table) >= len(iso3166.countries)
assert table['AFG'].name == 'Afghanistan'
|
<commit_before><commit_msg>Add tests for the fields updated in ee3cd62<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
import iso3166
def test_country_list():
country_list = iso3166.countries
assert len(country_list) > 100
assert all(isinstance(c, iso3166.Country) for c in country_list)
def test_by_name():
table = iso3166.countries_by_name
assert len(table) >= len(iso3166.countries)
assert table["AFGHANISTAN"].name == "Afghanistan"
def test_by_number():
table = iso3166.countries_by_numeric
assert len(table) >= len(iso3166.countries)
assert table["008"].name == "Albania"
def test_by_alpha2():
table = iso3166.countries_by_alpha2
assert len(table) >= len(iso3166.countries)
assert table['AE'].name == "United Arab Emirates"
def test_by_alpha3():
table = iso3166.countries_by_alpha3
assert len(table) >= len(iso3166.countries)
assert table['AFG'].name == 'Afghanistan'
|
Add tests for the fields updated in ee3cd62# -*- coding: utf-8 -*-
import pytest
import iso3166
def test_country_list():
country_list = iso3166.countries
assert len(country_list) > 100
assert all(isinstance(c, iso3166.Country) for c in country_list)
def test_by_name():
table = iso3166.countries_by_name
assert len(table) >= len(iso3166.countries)
assert table["AFGHANISTAN"].name == "Afghanistan"
def test_by_number():
table = iso3166.countries_by_numeric
assert len(table) >= len(iso3166.countries)
assert table["008"].name == "Albania"
def test_by_alpha2():
table = iso3166.countries_by_alpha2
assert len(table) >= len(iso3166.countries)
assert table['AE'].name == "United Arab Emirates"
def test_by_alpha3():
table = iso3166.countries_by_alpha3
assert len(table) >= len(iso3166.countries)
assert table['AFG'].name == 'Afghanistan'
|
<commit_before><commit_msg>Add tests for the fields updated in ee3cd62<commit_after># -*- coding: utf-8 -*-
import pytest
import iso3166
def test_country_list():
country_list = iso3166.countries
assert len(country_list) > 100
assert all(isinstance(c, iso3166.Country) for c in country_list)
def test_by_name():
table = iso3166.countries_by_name
assert len(table) >= len(iso3166.countries)
assert table["AFGHANISTAN"].name == "Afghanistan"
def test_by_number():
table = iso3166.countries_by_numeric
assert len(table) >= len(iso3166.countries)
assert table["008"].name == "Albania"
def test_by_alpha2():
table = iso3166.countries_by_alpha2
assert len(table) >= len(iso3166.countries)
assert table['AE'].name == "United Arab Emirates"
def test_by_alpha3():
table = iso3166.countries_by_alpha3
assert len(table) >= len(iso3166.countries)
assert table['AFG'].name == 'Afghanistan'
|
|
876a2934086f05514a66f97fbebed2e0bd62c7f6
|
examples/gi_example.py
|
examples/gi_example.py
|
# Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
from ctypes import byref, POINTER, cast
import sys
sys.path.insert(0, '..')
from pgi.gitypes import *
gi_init()
repo = GIRepository.get_default()
error = POINTER(GError)()
repo.require("GLib", "2.0", GIRepositoryLoadFlags.LAZY, byref(error))
check_gerror(error)
base_info = repo.find_by_name("GLib", "warn_message")
if not base_info:
raise Exception
function_info = cast(base_info, GIFunctionInfoPtr)
in_args_type = GIArgument * 5
in_args = in_args_type(GIArgument(v_string="GITYPES"),
GIArgument(v_string=__file__),
GIArgument(v_uint=42),
GIArgument(v_string="main"),
GIArgument(v_string="hello world"),
)
retval = GIArgument()
error = POINTER(GError)()
function_info.invoke(in_args, 5, None, 0, byref(retval), byref(error))
check_gerror(error)
base_info.unref()
|
Add back the old gir example
|
Add back the old gir example
|
Python
|
lgpl-2.1
|
lazka/pgi,lazka/pgi
|
Add back the old gir example
|
# Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
from ctypes import byref, POINTER, cast
import sys
sys.path.insert(0, '..')
from pgi.gitypes import *
gi_init()
repo = GIRepository.get_default()
error = POINTER(GError)()
repo.require("GLib", "2.0", GIRepositoryLoadFlags.LAZY, byref(error))
check_gerror(error)
base_info = repo.find_by_name("GLib", "warn_message")
if not base_info:
raise Exception
function_info = cast(base_info, GIFunctionInfoPtr)
in_args_type = GIArgument * 5
in_args = in_args_type(GIArgument(v_string="GITYPES"),
GIArgument(v_string=__file__),
GIArgument(v_uint=42),
GIArgument(v_string="main"),
GIArgument(v_string="hello world"),
)
retval = GIArgument()
error = POINTER(GError)()
function_info.invoke(in_args, 5, None, 0, byref(retval), byref(error))
check_gerror(error)
base_info.unref()
|
<commit_before><commit_msg>Add back the old gir example<commit_after>
|
# Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
from ctypes import byref, POINTER, cast
import sys
sys.path.insert(0, '..')
from pgi.gitypes import *
gi_init()
repo = GIRepository.get_default()
error = POINTER(GError)()
repo.require("GLib", "2.0", GIRepositoryLoadFlags.LAZY, byref(error))
check_gerror(error)
base_info = repo.find_by_name("GLib", "warn_message")
if not base_info:
raise Exception
function_info = cast(base_info, GIFunctionInfoPtr)
in_args_type = GIArgument * 5
in_args = in_args_type(GIArgument(v_string="GITYPES"),
GIArgument(v_string=__file__),
GIArgument(v_uint=42),
GIArgument(v_string="main"),
GIArgument(v_string="hello world"),
)
retval = GIArgument()
error = POINTER(GError)()
function_info.invoke(in_args, 5, None, 0, byref(retval), byref(error))
check_gerror(error)
base_info.unref()
|
Add back the old gir example# Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
from ctypes import byref, POINTER, cast
import sys
sys.path.insert(0, '..')
from pgi.gitypes import *
gi_init()
repo = GIRepository.get_default()
error = POINTER(GError)()
repo.require("GLib", "2.0", GIRepositoryLoadFlags.LAZY, byref(error))
check_gerror(error)
base_info = repo.find_by_name("GLib", "warn_message")
if not base_info:
raise Exception
function_info = cast(base_info, GIFunctionInfoPtr)
in_args_type = GIArgument * 5
in_args = in_args_type(GIArgument(v_string="GITYPES"),
GIArgument(v_string=__file__),
GIArgument(v_uint=42),
GIArgument(v_string="main"),
GIArgument(v_string="hello world"),
)
retval = GIArgument()
error = POINTER(GError)()
function_info.invoke(in_args, 5, None, 0, byref(retval), byref(error))
check_gerror(error)
base_info.unref()
|
<commit_before><commit_msg>Add back the old gir example<commit_after># Copyright 2012 Christoph Reiter
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
from ctypes import byref, POINTER, cast
import sys
sys.path.insert(0, '..')
from pgi.gitypes import *
gi_init()
repo = GIRepository.get_default()
error = POINTER(GError)()
repo.require("GLib", "2.0", GIRepositoryLoadFlags.LAZY, byref(error))
check_gerror(error)
base_info = repo.find_by_name("GLib", "warn_message")
if not base_info:
raise Exception
function_info = cast(base_info, GIFunctionInfoPtr)
in_args_type = GIArgument * 5
in_args = in_args_type(GIArgument(v_string="GITYPES"),
GIArgument(v_string=__file__),
GIArgument(v_uint=42),
GIArgument(v_string="main"),
GIArgument(v_string="hello world"),
)
retval = GIArgument()
error = POINTER(GError)()
function_info.invoke(in_args, 5, None, 0, byref(retval), byref(error))
check_gerror(error)
base_info.unref()
|
|
0842aeca5372b3c4c14ad3719f0d53f29671a283
|
museum_site/migrations/0050_remove_article_preview.py
|
museum_site/migrations/0050_remove_article_preview.py
|
# Generated by Django 3.1.7 on 2021-04-27 18:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0049_auto_20210324_1957'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='preview',
),
]
|
Remove preview field from DB
|
Remove preview field from DB
|
Python
|
mit
|
DrDos0016/z2,DrDos0016/z2,DrDos0016/z2
|
Remove preview field from DB
|
# Generated by Django 3.1.7 on 2021-04-27 18:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0049_auto_20210324_1957'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='preview',
),
]
|
<commit_before><commit_msg>Remove preview field from DB<commit_after>
|
# Generated by Django 3.1.7 on 2021-04-27 18:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0049_auto_20210324_1957'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='preview',
),
]
|
Remove preview field from DB# Generated by Django 3.1.7 on 2021-04-27 18:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0049_auto_20210324_1957'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='preview',
),
]
|
<commit_before><commit_msg>Remove preview field from DB<commit_after># Generated by Django 3.1.7 on 2021-04-27 18:26
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('museum_site', '0049_auto_20210324_1957'),
]
operations = [
migrations.RemoveField(
model_name='article',
name='preview',
),
]
|
|
a0987218331fa7abe11a69226f54545c6198596a
|
test/workers/test_plan.py
|
test/workers/test_plan.py
|
import random
import syft as sy
import torch
import torch as th
import torch.nn as nn
import torch.nn.functional as F
def test_plan_remote_function(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = F.relu(x_ptr)
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 0, 3, 4])).all()
def test_plan_remote_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = x_ptr.abs()
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
def test_plan_remote_inplace_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
x_ptr.abs_()
x_back = x_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
|
Add some tests on Plan Worker
|
Add some tests on Plan Worker
|
Python
|
apache-2.0
|
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
|
Add some tests on Plan Worker
|
import random
import syft as sy
import torch
import torch as th
import torch.nn as nn
import torch.nn.functional as F
def test_plan_remote_function(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = F.relu(x_ptr)
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 0, 3, 4])).all()
def test_plan_remote_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = x_ptr.abs()
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
def test_plan_remote_inplace_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
x_ptr.abs_()
x_back = x_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
|
<commit_before><commit_msg>Add some tests on Plan Worker<commit_after>
|
import random
import syft as sy
import torch
import torch as th
import torch.nn as nn
import torch.nn.functional as F
def test_plan_remote_function(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = F.relu(x_ptr)
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 0, 3, 4])).all()
def test_plan_remote_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = x_ptr.abs()
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
def test_plan_remote_inplace_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
x_ptr.abs_()
x_back = x_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
|
Add some tests on Plan Workerimport random
import syft as sy
import torch
import torch as th
import torch.nn as nn
import torch.nn.functional as F
def test_plan_remote_function(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = F.relu(x_ptr)
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 0, 3, 4])).all()
def test_plan_remote_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = x_ptr.abs()
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
def test_plan_remote_inplace_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
x_ptr.abs_()
x_back = x_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
|
<commit_before><commit_msg>Add some tests on Plan Worker<commit_after>import random
import syft as sy
import torch
import torch as th
import torch.nn as nn
import torch.nn.functional as F
def test_plan_remote_function(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = F.relu(x_ptr)
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 0, 3, 4])).all()
def test_plan_remote_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
res_ptr = x_ptr.abs()
x_back = res_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
def test_plan_remote_inplace_method(hook):
plan_worker = sy.Plan(hook, id=str(int(10e10 * random.random())))
x = torch.tensor([1, -1, 3, 4])
x_ptr = x.send(plan_worker)
x_ptr.abs_()
x_back = x_ptr.get()
assert (x_back == torch.tensor([1, 1, 3, 4])).all()
|
|
41609c100fb23821f64457774d8bc7fc9e578225
|
tests/test_computer.py
|
tests/test_computer.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from os import path
import mock
import pytest
from wonderful_bing.wonderful_bing import Computer
@pytest.fixture
def computer():
computer = Computer()
return computer
def test_computer(computer):
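    # expected wallpaper-setting command templates for each supported desktop environment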
gnome_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.gnome.desktop.background "
"picture-uri file://{0}")
mate_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.mate.background "
"picture-filename '{0}'")
xfce_based = ("DISPLAY=:0 xfconf-query -c xfce4-desktop "
"-p /backdrop/screen0/monitor0/image-path -s {0}")
assert computer._get_command('gnome') == gnome_based
assert computer._get_command('gnome2') == gnome_based
assert computer._get_command('cinnamon') == gnome_based
assert computer._get_command('mate') == mate_based
assert computer._get_command('xfce4') == xfce_based
assert computer._get_command('blablabla') is None
def test_set_wallpaper(computer):
with pytest.raises(SystemExit):
computer.set_wallpaper('blablabla', 'tmp/blabla.jpg')
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.set_wallpaper('gnome', '/tmp/blabla.jpg')
command = computer._get_command('gnome').format('/tmp/blabla.jpg')
subprocess.Popen.assert_called_once_with(command, shell=True)
def test_show_notify(computer):
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.show_notify('Hello, world')
notify_icon = path.join(
path.dirname(path.dirname(path.realpath(__file__))),
'wonderful_bing/img/icon.png')
subprocess.Popen.assert_called_once_with(
["notify-send", "-a", "wonderful_bing", "-i",
notify_icon, "Today's Picture Story", "Hello, world"])
|
Add test for Computer class
|
Add test for Computer class
|
Python
|
mit
|
lord63/wonderful_bing
|
Add test for Computer class
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from os import path
import mock
import pytest
from wonderful_bing.wonderful_bing import Computer
@pytest.fixture
def computer():
computer = Computer()
return computer
def test_computer(computer):
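    # expected wallpaper-setting command templates for each supported desktop environment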
gnome_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.gnome.desktop.background "
"picture-uri file://{0}")
mate_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.mate.background "
"picture-filename '{0}'")
xfce_based = ("DISPLAY=:0 xfconf-query -c xfce4-desktop "
"-p /backdrop/screen0/monitor0/image-path -s {0}")
assert computer._get_command('gnome') == gnome_based
assert computer._get_command('gnome2') == gnome_based
assert computer._get_command('cinnamon') == gnome_based
assert computer._get_command('mate') == mate_based
assert computer._get_command('xfce4') == xfce_based
assert computer._get_command('blablabla') is None
def test_set_wallpaper(computer):
with pytest.raises(SystemExit):
computer.set_wallpaper('blablabla', 'tmp/blabla.jpg')
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.set_wallpaper('gnome', '/tmp/blabla.jpg')
command = computer._get_command('gnome').format('/tmp/blabla.jpg')
subprocess.Popen.assert_called_once_with(command, shell=True)
def test_show_notify(computer):
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.show_notify('Hello, world')
notify_icon = path.join(
path.dirname(path.dirname(path.realpath(__file__))),
'wonderful_bing/img/icon.png')
subprocess.Popen.assert_called_once_with(
["notify-send", "-a", "wonderful_bing", "-i",
notify_icon, "Today's Picture Story", "Hello, world"])
|
<commit_before><commit_msg>Add test for Computer class<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from os import path
import mock
import pytest
from wonderful_bing.wonderful_bing import Computer
@pytest.fixture
def computer():
computer = Computer()
return computer
def test_computer(computer):
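    # expected wallpaper-setting command templates for each supported desktop environment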
gnome_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.gnome.desktop.background "
"picture-uri file://{0}")
mate_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.mate.background "
"picture-filename '{0}'")
xfce_based = ("DISPLAY=:0 xfconf-query -c xfce4-desktop "
"-p /backdrop/screen0/monitor0/image-path -s {0}")
assert computer._get_command('gnome') == gnome_based
assert computer._get_command('gnome2') == gnome_based
assert computer._get_command('cinnamon') == gnome_based
assert computer._get_command('mate') == mate_based
assert computer._get_command('xfce4') == xfce_based
assert computer._get_command('blablabla') is None
def test_set_wallpaper(computer):
with pytest.raises(SystemExit):
computer.set_wallpaper('blablabla', 'tmp/blabla.jpg')
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.set_wallpaper('gnome', '/tmp/blabla.jpg')
command = computer._get_command('gnome').format('/tmp/blabla.jpg')
subprocess.Popen.assert_called_once_with(command, shell=True)
def test_show_notify(computer):
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.show_notify('Hello, world')
notify_icon = path.join(
path.dirname(path.dirname(path.realpath(__file__))),
'wonderful_bing/img/icon.png')
subprocess.Popen.assert_called_once_with(
["notify-send", "-a", "wonderful_bing", "-i",
notify_icon, "Today's Picture Story", "Hello, world"])
|
Add test fro Computer class#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from os import path
import mock
import pytest
from wonderful_bing.wonderful_bing import Computer
@pytest.fixture
def computer():
computer = Computer()
return computer
def test_computer(computer):
gnome_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.gnome.desktop.background "
"picture-uri file://{0}")
mate_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.mate.background "
"picture-filename '{0}'")
xfce_based = ("DISPLAY=:0 xfconf-query -c xfce4-desktop "
"-p /backdrop/screen0/monitor0/image-path -s {0}")
assert computer._get_command('gnome') == gnome_based
assert computer._get_command('gnome2') == gnome_based
assert computer._get_command('cinnamon') == gnome_based
assert computer._get_command('mate') == mate_based
assert computer._get_command('xfce4') == xfce_based
assert computer._get_command('blablabla') is None
def test_set_wallpaper(computer):
with pytest.raises(SystemExit):
computer.set_wallpaper('blablabla', 'tmp/blabla.jpg')
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.set_wallpaper('gnome', '/tmp/blabla.jpg')
command = computer._get_command('gnome').format('/tmp/blabla.jpg')
subprocess.Popen.assert_called_once_with(command, shell=True)
def test_show_notify(computer):
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.show_notify('Hello, world')
notify_icon = path.join(
path.dirname(path.dirname(path.realpath(__file__))),
'wonderful_bing/img/icon.png')
subprocess.Popen.assert_called_once_with(
["notify-send", "-a", "wonderful_bing", "-i",
notify_icon, "Today's Picture Story", "Hello, world"])
|
<commit_before><commit_msg>Add test fro Computer class<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from os import path
import mock
import pytest
from wonderful_bing.wonderful_bing import Computer
@pytest.fixture
def computer():
computer = Computer()
return computer
def test_computer(computer):
gnome_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.gnome.desktop.background "
"picture-uri file://{0}")
mate_based = ("DISPLAY=:0 GSETTINGS_BACKEND=dconf "
"/usr/bin/gsettings set org.mate.background "
"picture-filename '{0}'")
xfce_based = ("DISPLAY=:0 xfconf-query -c xfce4-desktop "
"-p /backdrop/screen0/monitor0/image-path -s {0}")
assert computer._get_command('gnome') == gnome_based
assert computer._get_command('gnome2') == gnome_based
assert computer._get_command('cinnamon') == gnome_based
assert computer._get_command('mate') == mate_based
assert computer._get_command('xfce4') == xfce_based
assert computer._get_command('blablabla') is None
def test_set_wallpaper(computer):
with pytest.raises(SystemExit):
computer.set_wallpaper('blablabla', 'tmp/blabla.jpg')
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.set_wallpaper('gnome', '/tmp/blabla.jpg')
command = computer._get_command('gnome').format('/tmp/blabla.jpg')
subprocess.Popen.assert_called_once_with(command, shell=True)
def test_show_notify(computer):
with mock.patch('wonderful_bing.wonderful_bing.subprocess') as subprocess:
computer.show_notify('Hello, world')
notify_icon = path.join(
path.dirname(path.dirname(path.realpath(__file__))),
'wonderful_bing/img/icon.png')
subprocess.Popen.assert_called_once_with(
["notify-send", "-a", "wonderful_bing", "-i",
notify_icon, "Today's Picture Story", "Hello, world"])
|
|
ad5f8440a4907f669a94470423dd937aa606412a
|
filter_to_join_part.py
|
filter_to_join_part.py
|
#!/usr/bin/env python3
import os

# Usage: run as `filter_to_join_part.py`
# Put inputs in the directory 'input_logs' with '.txt' extension
# Outputs will be created in 'output_logs', overwriting contents
# Edit important_text to find different entries in logs
important_text = ["has joined", "has left"]


def main():
    # Make sure the output directory exists before writing filtered logs.
    os.makedirs("output_logs", exist_ok=True)
    for file in os.listdir("input_logs"):
        if file.endswith(".txt"):
            with open(os.path.join("input_logs", file), "r") as raw_log:
                with open(os.path.join("output_logs", os.path.basename(file)), "w") as filtered_log:
                    # Keep only the lines that contain any of the important markers.
                    for line in raw_log:
                        if any(x in line for x in important_text):
                            filtered_log.write(line)


if __name__ == "__main__":
    main()
|
Add a simple, static log filtering utility
|
Add a simple, static log filtering utility
|
Python
|
unlicense
|
thomasrussellmurphy/trm-snippets,thomasrussellmurphy/trm-snippets
|
|
46a2bcda270b76b62a7f125836fa0a374258ead4
|
examples/translations/chinese_test_1.py
|
examples/translations/chinese_test_1.py
|
from seleniumbase.translate.chinese import 硒测试用例


class 测试类(硒测试用例):
    def test_例子1(self):
        self.开启网址("https://xkcd.in/comic?lg=cn&id=353")
        self.断言标题("Python - XKCD中文站")
        self.断言元素("#content div.comic-body")
        self.断言文本("上漫画")
        self.单击("div.nextLink")
        self.断言文本("老妈的逆袭", "#content h1")
        self.单击链接文本("下一篇")
        self.断言文本("敲桌子", "#content h1")
        self.断言文本("有时候无聊就是最棒的乐趣")
        self.回去()
        self.单击链接文本("兰德尔·门罗")
        self.断言文本("兰德尔·门罗", "#firstHeading")
        self.更新文本("#searchInput", "程式设计")
        self.单击("#searchButton")
        self.断言文本("程序设计", "#firstHeading")
|
Add a Chinese language example test
|
Add a Chinese language example test
|
Python
|
mit
|
mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase
|
|